Schema (column | dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
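Each `qsc_*_quality_signal` column holds a measured value and the matching `qsc_*` column holds a 0/1 flag; in every row shown below, `hits` appears to equal the number of set flags. A minimal inspection sketch, assuming the dump is stored as a local Parquet file (`sample.parquet` and the 0.8 threshold are invented for illustration; the real storage format is not shown here):

```python
# Minimal inspection sketch. "sample.parquet" and the 0.8 threshold are
# assumptions for illustration; the actual storage of this dump is not shown.
import pandas as pd

df = pd.read_parquet("sample.parquet")

# Keep rows whose duplicated-5-gram signal stayed low and whose flag is unset.
clean = df[(df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.8)
           & (df["qsc_code_frac_chars_dupe_5grams"] == 0)]
print(clean[["max_stars_repo_name", "size", "hits"]])
```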
**Row 1**

- hexsha: `f264e9ca945cc8836c2e599b584988d405644fda`; size: 3,364; ext: py; lang: Python
- max_stars: `subtasks/MIMIC3 Database Extraction/download_maker.py` in `AlanPalomino/TTB_2020_2_13.` @ `96586480c3120e2733ba25cc83cbc2dfb1628070`, licenses ["MIT"], count 1, events 2021-02-02T02:17:27.000Z to 2021-02-02T02:17:27.000Z
- max_issues: same path, repo, and head; licenses ["MIT"], count null, events null to null
- max_forks: same path, repo, and head; licenses ["MIT"], count 2, events 2020-11-23T17:36:49.000Z to 2021-02-02T02:19:37.000Z

content:

```python
#!/usr/bin/env python3
# _*_ coding: utf-8 _*_ #
#
#___________________________________________________________________________
#| |
#|                   Generation of MIMIC-III JSON files                     |
#| |
#| |
#|_________________________________________________________________________|
# ===================== Imported Libraries ====================== #
from pathlib import Path
import os
import csv
def complete_db():
patient_files = [ file for file in os.listdir() if file.endswith(".txt") and file != "RECORDS.txt" ]
data = list()
with open("RECORDS.txt", "r") as r:
RECORDS = r.read()
for condition_file in patient_files:
condition = Path(condition_file).stem
with open(condition_file, "r") as f:
condition_data = [(condition, RECORDS[idx-4:idx+7]) for idx in
[RECORDS.find(f"p{int(line):06d}") for line in
f.read().splitlines() if RECORDS.find(f"p{int(line):06d}") != -1]]
data.extend(condition_data)
print(f"- {condition} has {len(condition_data)} records.")
with open("download_complete.sh", "w+", newline="\n") as f:
for condition, record_route in data:
f.write(f"""
mkdir {condition}_{record_route[4:]}
cd {condition}_{record_route[4:]}
if [[ ! -d $PWD/{record_route[4:]} ]]; then
wget -r -nc -np -c https://physionet.org/static/published-projects/mimic3wdb-matched/1.0/{record_route}/
mv physionet.org/static/published-projects/mimic3wdb-matched/1.0/{record_route} .
fi
rm -r physionet.org/
cd ..
""")
with open("download_complete.sh", "r") as f:
print(f"\n{len(f.readlines())//10} of {len(data)} records in the bash downloader.")
def worksample_db():
patient_files = [ file for file in os.listdir() if file.endswith(".txt") and file != "RECORDS.txt" ]
data = list()
with open("RECORDS.txt", "r") as r:
RECORDS = r.read()
for condition_file in patient_files:
condition = Path(condition_file).stem
i = 0
with open(condition_file, "r") as f:
condition_data = [(condition, RECORDS[idx-4:idx+7]) for idx in
[RECORDS.find(f"p{int(line):06d}") for line in
f.read().splitlines() if RECORDS.find(f"p{int(line):06d}") != -1]]
data.extend(condition_data[:3])
print(f"- {condition} has {len(condition_data)} records.")
with open("download_worksample.sh", "w+", newline="\n") as f:
for condition, record_route in data:
f.write(f"""
mkdir {condition}_{record_route[4:]}
cd {condition}_{record_route[4:]}
if [[ ! -d $PWD/{record_route[4:]} ]]; then
wget -r -nc -np -c https://physionet.org/static/published-projects/mimic3wdb-matched/1.0/{record_route}/
mv physionet.org/static/published-projects/mimic3wdb-matched/1.0/{record_route} .
fi
rm -r physionet.org/
cd ..
""")
def main():
complete_db()
worksample_db()
if __name__ == "__main__":
main()
```

avg_line_length: 40.047619; max_line_length: 109; alphanum_fraction: 0.550238; effective: 0; hits: 7

| signal | value (`qsc_*_quality_signal`) | flag (`qsc_*`) |
|---|---|---|
| code_num_words | 390 | 0 |
| code_num_chars | 3,364 | 0 |
| code_mean_word_length | 4.235897 | 0 |
| code_frac_words_unique | 0.253846 | null |
| code_frac_chars_top_2grams | 0.079903 | 0 |
| code_frac_chars_top_3grams | 0.072639 | 0 |
| code_frac_chars_top_4grams | 0.031477 | 0 |
| code_frac_chars_dupe_5grams | 0.843826 | 1 |
| code_frac_chars_dupe_6grams | 0.823245 | 1 |
| code_frac_chars_dupe_7grams | 0.823245 | 1 |
| code_frac_chars_dupe_8grams | 0.823245 | 1 |
| code_frac_chars_dupe_9grams | 0.823245 | 1 |
| code_frac_chars_dupe_10grams | 0.823245 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.016115 | 0 |
| code_frac_chars_whitespace | 0.299049 | 0 |
| code_size_file_byte | 3,364 | 0 |
| code_num_lines | 83 | 0 |
| code_num_chars_line_max | 110 | 0 |
| code_num_chars_line_mean | 40.53012 | 0 |
| code_frac_chars_alphabet | 0.684478 | 0 |
| code_frac_chars_comments | 0.166171 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0.721311 | 1 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0.081967 | 0 |
| code_frac_chars_string_length | 0.399409 | 0 |
| code_frac_chars_long_word_length | 0.150609 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0.04918 | 0 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.04918 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.098361 | 0 |
| codepython_frac_lines_print | 0.04918 | 0 |
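Duplicate-n-gram mass dominates this row (dupe_5grams through dupe_10grams all sit around 0.82–0.84 and all six flags are set), which is consistent with `complete_db()` and `worksample_db()` above being near copies of each other. A sketch of how a `frac_chars_dupe_ngrams`-style value can be computed; the exact tokenization and weighting used by the pipeline are assumptions here:

```python
# Sketch of a duplicated-n-gram character fraction, assuming the signal means
# "fraction of n-gram character mass covered by word n-grams occurring more
# than once". The real pipeline's definition may differ.
from collections import Counter

def frac_chars_dupe_ngrams(text: str, n: int) -> float:
    words = text.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    dupe_chars = sum(sum(len(w) for w in g) for g in ngrams if counts[g] > 1)
    total_chars = sum(sum(len(w) for w in g) for g in ngrams)
    return dupe_chars / total_chars if total_chars else 0.0

print(frac_chars_dupe_ngrams("a b c a b c a b", 2))  # -> 1.0, fully duplicated
```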
**Row 2**

- hexsha: `f29317a39af9ead75b47215c6c4f5ce38f4da82d`; size: 15,483; ext: py; lang: Python
- max_stars / max_issues / max_forks (all three groups identical): `IObasics/_MakeGraph.py` in `softmatter-design/pythonScripts` @ `cd4cd6e3ae55bd2debcbb160981d3738125d4af3`, licenses ["MIT"], count null, events null to null

content:

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
import numpy as np
import platform
import subprocess
import os
################################################################################
class MakeHistNW:
def __init__(self, cond_list, target_name):
# cond_list = [base_name, data_list, bins, normalize, Legend, option]
self.list = cond_list[1]
self.bins = cond_list[2]
#
self.dir = target_name
#
self.base = cond_list[0]
self.norm = cond_list[3]
#
self.f_dat = "nw_hist.dat"
self.f_plt = "make_hist.plt"
self.f_png = "histgram.png"
self.leg = cond_list[4]
self.option = cond_list[5]
############################################################
    # Build the histogram graph
def make_hist_all(self):
        # Build the histogram data
bin_width, hist_data, val, x = self.make_hist_data()
        # Write out the histogram data
self.write_data(hist_data, bin_width)
        # Render the graph
self.make_graph(bin_width)
return x, val
############################################################
    # Build the histogram data
def make_hist_data(self):
        # Compute the histogram
weights = np.ones(len(self.list))/float(len(self.list))
if self.norm:
val, x = np.histogram(self.list, bins=self.bins, weights= weights)
else:
val, x = np.histogram(self.list, bins=self.bins)
        # Reshape the data for plotting
bin_width = (x[1]-x[0])
mod_x = (x + bin_width/2)[:-1]
hist_data = np.stack([mod_x, val], axis = 1)
return bin_width, hist_data, val, x
##############################
    # Write out the histogram data
def write_data(self, hist_data, bin_width):
os.makedirs(self.dir, exist_ok=True)
with open(os.path.join(self.dir, self.f_dat), 'w') as f:
f.write("# Histgram data:\n\n")
for line in hist_data:
f.write(str(line[0]) + '\t' + str(line[1]) + '\n')
return
###########################################################
    # Render the graph
def make_graph(self, bin_width):
self.make_script(bin_width)
cwd = os.getcwd()
os.chdir(self.dir)
if platform.system() == "Windows":
subprocess.call(self.f_plt, shell=True)
elif platform.system() == "Linux":
subprocess.call('gnuplot ' + self.f_plt, shell=True)
os.chdir(cwd)
return
#######################
    # Generate the required gnuplot script
def make_script(self, bin_width):
with open(os.path.join(self.dir, self.f_plt), 'w') as f:
script = self.script_content(bin_width)
f.write(script)
return
#################
    # Script body
def script_content(self, bin_width):
script = 'set term pngcairo font "Arial,14" \nset colorsequence classic \n'
#
script += '# \ndata = "' + self.f_dat + '" \nset output "' + self.f_png + ' "\n'
#
script += '#\nset size square\n# set xrange [0:1.0]\n#set yrange [0:100]\n'
script += '#\nset xlabel "' + self.leg[0] + '"\nset ylabel "' + self.leg[1] + '"\n\n'
#
if self.base == "Rx":
script += '#f = 3\n#N = 39\n#R1 = (1.75*N)**0.5\n#Pos = R1/2**0.5\n#delta = Pos*(2./3.)**0.5\n\n'
script += 'f = 4\nN = 39\nR1 = (1.75*N)**0.5\nPos = R1/3**0.5\ndelta = Pos*(2./3.)**0.5\n\n'
script += 'C=0.25\nf(x) = C*(1./2.)*(1./(delta*(3.142*2.)**0.5))*(exp(-1.*((x-Pos)**2)/(2.*delta**2)) + exp(-1.*((x+Pos)**2)/(2.*delta**2)))\n\n'
#
if self.base == "R":
script += 'N = 39\nb=0.97\nCN=1.7*1.5\nC=0.02\n'
script += 'f(x, CN) = C*4*pi*x**2*(3/(2*pi*N*CN*b**2))**(3/2)*exp(-3*x**2/(2*N*CN*b**2))\n'
script += '#fit f(x, CN) data via CN, C\n'
#
if self.base == "angle":
if self.option != "box":
script += 'plot data u 1:($2/(3.142*sin(3.142*$1/180))) w l noti'
else:
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += 'plot data u 1:($2/(3.142*sin(3.142*$1/180))) w boxes noti'
elif self.option == "box":
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += '#\nplot data w boxes noti'
else:
if self.base == "Rx":'#\nplot data w l noti'
if self.base == "Rx":
script += ', \\\n f(x)'
elif self.base == "R":
script += ', \\\n f(x, CN)'
return script
################################################################################
class MakeHist:
def __init__(self, cond_list, target_name):
# cond_list = [base_name, data_list, bins, normalize, Legend, option]
self.list = cond_list[1]
self.bins = cond_list[2]
if target_name != '':
self.dir = os.path.join(target_name, cond_list[0])
else:
self.dir = cond_list[0]
self.base = cond_list[0]
self.norm = cond_list[3]
#
self.f_dat = cond_list[0] + "_hist.dat"
self.f_plt = cond_list[0] + ".plt"
self.f_png = cond_list[0] + ".png"
self.leg = cond_list[4]
self.option = cond_list[5]
############################################################
    # Build the histogram graph
def make_hist_all(self):
        # Build the histogram data
bin_width, hist_data = self.make_hist_data()
        # Write out the histogram data
self.write_data(hist_data, bin_width)
        # Render the graph
self.make_graph(bin_width)
return
############################################################
    # Build the histogram data
def make_hist_data(self):
        # Compute the histogram
weights = np.ones(len(self.list))/float(len(self.list))
if self.norm:
val, x = np.histogram(self.list, bins=self.bins, weights= weights)
else:
val, x = np.histogram(self.list, bins=self.bins)
        # Reshape the data for plotting
bin_width = (x[1]-x[0])
mod_x = (x + bin_width/2)[:-1]
hist_data = np.stack([mod_x, val], axis = 1)
return bin_width, hist_data
##############################
    # Write out the histogram data
def write_data(self, hist_data, bin_width):
os.makedirs(self.dir, exist_ok=True)
with open(os.path.join(self.dir, self.f_dat), 'w') as f:
f.write("# Histgram data:\n\n")
for line in hist_data:
f.write(str(line[0]) + '\t' + str(line[1]) + '\n')
return
###########################################################
    # Render the graph
def make_graph(self, bin_width):
self.make_script(bin_width)
cwd = os.getcwd()
os.chdir(self.dir)
if platform.system() == "Windows":
subprocess.call(self.f_plt, shell=True)
elif platform.system() == "Linux":
subprocess.call('gnuplot ' + self.f_plt, shell=True)
os.chdir(cwd)
return
#######################
    # Generate the required gnuplot script
def make_script(self, bin_width):
with open(os.path.join(self.dir, self.f_plt), 'w') as f:
script = self.script_content(bin_width)
f.write(script)
return
#################
    # Script body
def script_content(self, bin_width):
script = 'set term pngcairo font "Arial,14" \nset colorsequence classic \n'
#
script += '# \ndata = "' + self.f_dat + '" \nset output "' + self.f_png + ' "\n'
#
script += '#\nset size square\n#set xrange [0:]\n#set yrange [0:100]\n'
script += '#\nset xlabel "' + self.leg[0] + '"\nset ylabel "' + self.leg[1] + '"\n\n'
#
if self.base == "Rx":
n_seg = self.option[0]
bond = self.option[1]
script += 'N = ' + str(n_seg) + '\n'
script += 'bond = ' + str(bond) + '\n'
script += 'CN=1.7\n'
script += 'C=0.1\n\n'
#
script += 'f(x) = C*(3/(2*pi*N*CN*bond**2))**(3/2)*exp(-3*x**2/(2*N*CN*bond**2))\n\n'
script += 'fit f(x) data via C, CN\n\n'
script += '#\nset label 1 sprintf("C_N=%.3f", CN) at graph 0.7, 0.8\n\n'
#
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += '#\nplot data w boxes noti'
script += ', \\\n f(x)'
# if self.base == "Rx":
# if (type(self.option) == list) and len(self.option) == 4:
# n_seg = self.option[0]
# bond = self.option[1]
# cn = self.option[2]
# func = self.option[3]
# elif (type(self.option) == list) and len(self.option) == 2:
# n_seg = self.option[0]
# bond = self.option[1]
# cn = 1.7
# func = 0
# else:
# n_seg = 39
# bond = 0.97
# cn = 1.7
# func = 4
# script += 'N = ' + str(n_seg) + '\n'
# script += 'bond = ' + str(bond) + '\n'
# script += 'CN = ' + str(cn) + '\n'
# script += 'f = ' + str(func) + '\n'
# #
# script += 'R1 = CN*(N**0.5)*bond\n'
# script += 'C=0.25\n\n'
# #
# if func == 3:
# script += 'Pos = R1/2**0.5\ndelta = Pos*(1. - 2./f)**0.5\n\n'
# script += 'f(x) = C*(1./2.)*(1./(delta*(3.142*2.)**0.5))*(exp(-1.*((x-Pos)**2)/(2.*delta**2)) + exp(-1.*((x+Pos)**2)/(2.*delta**2)))\n\n'
# script += 'fit f(x) data via C\n\n'
# elif func == 4:
# script += 'Pos = R1/3**0.5\ndelta = Pos*(1. - 2./f)**0.5\n\n'
# script += 'f(x) = C*(1./2.)*(1./(delta*(3.142*2.)**0.5))*(exp(-1.*((x-Pos)**2)/(2.*delta**2)) + exp(-1.*((x+Pos)**2)/(2.*delta**2)))\n\n'
# script += 'fit f(x) data via C\n\n'
# #
# script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
# script += '#\nplot data w boxes noti'
# script += ', \\\n f(x)'
#
if self.base == "R":
if (type(self.option) == list) and len(self.option) == 4:
n_seg = self.option[0]
bond = self.option[1]
cn = self.option[2]
func = self.option[3]
elif (type(self.option) == list) and len(self.option) == 2:
n_seg = self.option[0]
bond = self.option[1]
cn = 1.7
func = 0
else:
n_seg = 39
bond = 0.97
cn = 1.7
func = 4
script += 'N = ' + str(n_seg) + '\n'
script += 'bond = ' + str(bond) + '\n'
script += 'CN = ' + str(cn) + '\n'
script += 'f = ' + str(func) + '\n'
script += 'C = 0.02\n\n'
script += 'f(x, CN) = C*4.*pi*x**2.*(3./(2.*pi*N*CN*bond**2.))**(3./2.)*exp(-3.*x**2./(2.*N*CN*bond**2.))\n'
script += 'fit f(x, CN) data via CN, C\n\n'
script += '#\nset label 1 sprintf("C_N=%.3f", CN) at graph 0.7, 0.8\n\n'
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += '#\nplot data w boxes noti'
script += ', \\\n f(x, CN)'
#
if self.base == "angle":
if self.option != "box":
script += 'plot data u 1:($2/(3.142*sin(3.142*$1/180))) w l noti'
else:
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += 'plot data u 1:($2/(3.142*sin(3.142*$1/180))) w boxes noti'
elif self.option == "box":
script += 'set style fill solid 0.5\nset boxwidth ' + str(bin_width) + '\n'
script += '#\nplot data w boxes noti'
return script
#############################################################################################
class MakeMulti:
def __init__(self, cond_list, target_name):
# cond_list = [base_name, data_list, Legend]
self.list = cond_list[1]
if target_name != '':
self.dir = os.path.join(target_name, cond_list[0])
else:
self.dir = cond_list[0]
self.base = cond_list[0]
self.repeat = len(cond_list[1])
#
self.f_dat = cond_list[0] + ".dat"
self.f_plt = cond_list[0] + ".plt"
self.f_png = cond_list[0] + ".png"
self.leg = cond_list[2]
############################################################
    # Build the multi-series graph
def make_all(self):
        # Write out the data
self.write_data()
        # Render the graph
self.make_graph()
return
##############################
    # Write out the data
def write_data(self):
os.makedirs(self.dir, exist_ok=True)
with open(os.path.join(self.dir, self.f_dat), 'w') as f:
f.write("# data:\n")
if self.base != "CN_ave":
for i, data in enumerate(self.list):
f.write("\n\n# " + str(i) +":\n\n")
for line in data:
f.write(str(line[0]) + '\t' + str(line[1]) + '\n')
else:
for line in self.list:
f.write(str(line[0]) + '\t' + str(line[1]) + '\n')
return
###########################################################
    # Render the graph
def make_graph(self):
self.make_script()
cwd = os.getcwd()
os.chdir(self.dir)
if platform.system() == "Windows":
subprocess.call(self.f_plt, shell=True)
elif platform.system() == "Linux":
subprocess.call('gnuplot ' + self.f_plt, shell=True)
os.chdir(cwd)
return
#######################
    # Generate the required gnuplot script
def make_script(self):
with open(os.path.join(self.dir, self.f_plt), 'w') as f:
script = self.script_content()
f.write(script)
return
#################
    # Script body
def script_content(self):
script = 'set term pngcairo font "Arial,14" \nset colorsequence classic \n'
#
script += '# \ndata = "' + self.f_dat + '" \nset output "' + self.f_png + ' "\n'
#
script += '#\nset size square\n#set xrange [1:]\n#set yrange [1:]\n'
script += '#\nset xlabel "' + self.leg[0] + '"\nset ylabel "' + self.leg[1] + '"\n\n'
#
if self.base == "CN" or self.base == "CN_ave":
script += '#\nset xrange [1:]\nset yrange [1:]\n'
script += 'set key bottom\n\n'
script += 'ct = 0.274\n'
script += "f(x) = (1+ct)/(1-ct) -(2*ct*(1-ct**x))/(1-ct)**2/x\n\n"
script += 'plot '
if self.base == "CN":
for i in range(self.repeat):
                    script += 'data ind ' + str(i) + ' w l lc ' + str(i) + ' noti, \\\n'
else:
script += 'data w l ti "averaged", \\\n'
script += 'f(x) w l lw 2 ti "theory"'
else:
script += 'plot '
for i in range(self.repeat):
                script += 'data ind ' + str(i) + ' w l lc ' + str(i) + ' noti, \\\n'
return script
#############################################################################################
class MakeSimple:
def __init__(self, cond_list, target_name):
# cond_list = [base_name, data_list, Legend, option]
self.list = cond_list[1]
if target_name != '':
self.dir = os.path.join(target_name, cond_list[0])
else:
self.dir = cond_list[0]
self.base = cond_list[0]
self.repeat = len(cond_list[1])
#
self.f_dat = cond_list[0] + ".dat"
self.f_plt = cond_list[0] + ".plt"
self.f_png = cond_list[0] + ".png"
self.leg = cond_list[2]
#
self.option = cond_list[3]
############################################################
    # Build the multi-series graph
def make_all(self):
        # Write out the data
self.write_data()
        # Render the graph
self.make_graph()
return
##############################
    # Write out the data
def write_data(self):
if not os.path.exists(self.dir):
os.mkdir(self.dir)
with open(os.path.join(self.dir, self.f_dat), 'w') as f:
f.write("# data:\n")
for i, data in enumerate(self.list):
f.write(str(data[0]) + '\t' + str(data[1]) + '\n')
return
###########################################################
    # Render the graph
def make_graph(self):
self.make_script()
cwd = os.getcwd()
os.chdir(self.dir)
if platform.system() == "Windows":
subprocess.call(self.f_plt, shell=True)
elif platform.system() == "Linux":
subprocess.call('gnuplot ' + self.f_plt, shell=True)
os.chdir(cwd)
return
#######################
    # Generate the required gnuplot script
def make_script(self):
with open(os.path.join(self.dir, self.f_plt), 'w') as f:
script = self.script_content()
f.write(script)
return
#################
    # Script body
def script_content(self):
script = 'set term pngcairo font "Arial,14" \nset colorsequence classic \n'
#
script += '# \ndata = "' + self.f_dat + '" \nset output "' + self.f_png + ' "\n'
#
script += '#\nset size square\n\n'
script += '#\nset xlabel "' + self.leg[0] + '"\nset ylabel "' + self.leg[1] + '"\n\n'
#
if self.option == 'semilog' and self.base == 'NM':
script += '#setxrange [0:1.0]\nset yrange [0.01:1]\n'
script += '#\nset logscale y\nset format y "10^{%L}"\n\n'
script += 'a=1\ntau=100\ns=100\ne=10000\n'
script += 'fit [s:e] a*exp(-1*x/tau) data usi 1:2 via a,tau\n\n'
script += 'set label 1 sprintf("Fitted {/Symbol t} = %.1e", tau) at graph 0.3, 0.8\n'
script += 'set label 2 sprintf("Fitting Region: %d to %d", s, e) at graph 0.3, 0.7\n\n'
script += 'plot data w l lt 1 noti, \\\n [s:e] a*exp(-1*x/tau) lt 2 noti\n\n'
else:
script += '#setxrange [0:1.0]\n#set yrange [0:100]\n'
script += 'plot data w l noti\n\n'
return script
```

avg_line_length: 32.391213; max_line_length: 148; alphanum_fraction: 0.535813; effective: 0; hits: 7

| signal | value (`qsc_*_quality_signal`) | flag (`qsc_*`) |
|---|---|---|
| code_num_words | 2,407 | 0 |
| code_num_chars | 15,483 | 0 |
| code_mean_word_length | 3.35189 | 0 |
| code_frac_words_unique | 0.089323 | null |
| code_frac_chars_top_2grams | 0.052058 | 0 |
| code_frac_chars_top_3grams | 0.019831 | 0 |
| code_frac_chars_top_4grams | 0.013882 | 0 |
| code_frac_chars_dupe_5grams | 0.885597 | 1 |
| code_frac_chars_dupe_6grams | 0.859073 | 1 |
| code_frac_chars_dupe_7grams | 0.850397 | 1 |
| code_frac_chars_dupe_8grams | 0.846926 | 1 |
| code_frac_chars_dupe_9grams | 0.843208 | 1 |
| code_frac_chars_dupe_10grams | 0.831309 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.034354 | 0 |
| code_frac_chars_whitespace | 0.187819 | 0 |
| code_size_file_byte | 15,483 | 0 |
| code_num_lines | 477 | 0 |
| code_num_chars_line_max | 149 | 0 |
| code_num_chars_line_mean | 32.45912 | 0 |
| code_frac_chars_alphabet | 0.607237 | 0 |
| code_frac_chars_comments | 0.123426 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0.767516 | 1 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0.063694 | 0 |
| code_frac_chars_string_length | 0.264655 | 0 |
| code_frac_chars_long_word_length | 0.045795 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0.082803 | 0 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.012739 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.178344 | 0 |
| codepython_frac_lines_print | 0.012739 | 0 |
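For orientation, a hypothetical driver for the `MakeHist` class above, following its documented `cond_list = [base_name, data_list, bins, normalize, Legend, option]` convention; the sample data, output directory, and import path are invented, and rendering the PNG additionally needs gnuplot on the PATH:

```python
# Hypothetical driver for MakeHist from _MakeGraph.py; the data, directory
# name, and import path are assumptions. gnuplot must be installed to plot.
import numpy as np
from IObasics._MakeGraph import MakeHist

distances = np.random.normal(loc=6.0, scale=1.5, size=1000)
# base "R" with option [n_seg, bond] selects the end-to-end-distance fit branch.
cond_list = ["R", distances, 50, True, ["R", "P(R)"], [39, 0.97]]
MakeHist(cond_list, "results").make_hist_all()
# Writes results/R/R_hist.dat and results/R/R.plt, then invokes gnuplot.
```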
**Row 3**

- hexsha: `4b5f4fb2c643579ef930b3cc1757d205ef056ce1`; size: 12,002; ext: py; lang: Python
- max_stars / max_issues / max_forks (all three groups identical): `scripts/alignReadsWithSTAR.py` in `sagnikbanerjee15/NGPINT` @ `bfeef9de296143f09656c127cbf17c89ca3e80df`, licenses ["MIT"], count null, events null to null

content:

```python
import os
def alignReadsWithStarForTrimming(options,logger_proxy,logging_mutex):
"""
Aligns the untrimmed reads to genome and also generates the
mappings to transcriptome
"""
cmd = f"STAR "
cmd += f" --genomeLoad Remove "
os.system(cmd)
for num,eachtype in enumerate([options.selected_sample_N_removed,options.background_sample_N_removed]):
for file_num,filename in enumerate(eachtype):
#filename=filename.split("/")[-1].split(".fastq")[0]
cmd="STAR "
cmd+=" --runThreadN "+str(options.CPU)+" "
cmd+=" --genomeDir "+options.star_genome_index
cmd+=" --genomeLoad LoadAndKeep "
if options.selected_ended=="SE" and options.background_ended=="SE":
cmd+=" --readFilesIn "+filename
else:
if file_num%2==1:
#print(num,file_num,eachtype[file_num-1],filename)
cmd+=" --readFilesIn "+eachtype[file_num-1]+" "+filename
else:
continue
if num==0:
cmd+=" --outFileNamePrefix "+options.selected_sample_STAR_prefix_round1[file_num]
else:
cmd+=" --outFileNamePrefix "+options.background_sample_STAR_prefix_round1[file_num]
cmd+=" --outSAMtype SAM "
#cmd+=" --outReadsUnmapped Fastx "
cmd+=" --outFilterMultimapNmax 500 "
cmd+=" --limitOutSAMoneReadBytes 1000000 "
cmd+=" --outFilterScoreMinOverLread 0.30 --outFilterMatchNminOverLread 0.30 "
cmd+=" --alignIntronMax 10000 "
cmd+=" --quantMode TranscriptomeSAM "
cmd+=" --quantTranscriptomeBan Singleend "
#cmd+=" --seedPerWindowNmax 100 "
#cmd+=" --seedPerReadNmax 2000 "
if num==0:
cmd+=" > "+options.selected_sample_STAR_round1_output[file_num]
cmd+=" 2> "+options.selected_sample_STAR_round1_error[file_num]
else:
cmd+=" > "+options.background_sample_STAR_round1_output[file_num]
cmd+=" 2> "+options.background_sample_STAR_round1_error[file_num]
with logging_mutex:
logger_proxy.info(f"Running command {cmd}")
os.system(cmd)
if num==0:
cmd="rm "+options.selected_sample_STAR_prefix_round1[file_num]+"Log.out "
cmd+=options.selected_sample_STAR_prefix_round1[file_num]+"Log.progress.out "
cmd+=options.selected_sample_STAR_prefix_round1[file_num]+"SJ.out.tab "
cmd+=options.selected_sample_STAR_round1_output[file_num]
else:
cmd="rm "+options.background_sample_STAR_prefix_round1[file_num]+"Log.out "
cmd+=options.background_sample_STAR_prefix_round1[file_num]+"Log.progress.out "
cmd+=options.background_sample_STAR_prefix_round1[file_num]+"SJ.out.tab "
cmd+=options.background_sample_STAR_round1_output[file_num]
with logging_mutex:
logger_proxy.info(f"Running command {cmd}")
os.system(cmd)
with logging_mutex:
logger_proxy.info("STAR round1 mapping for "+filename+" completed")
#logger.info("STAR round1 mapping for "+filename+" completed")
cmd="STAR "
cmd+=" --genomeLoad Remove "
cmd+=" --genomeDir "+options.star_genome_index
os.system(cmd)
def reAlignReadsMappedToVector(options,logger_proxy,logging_mutex):
"""
Selects fusion reads which are not present in the transcriptome file
and realign those to the genome and update both genome and
transcriptome files
"""
# Mapping fusion reads to genome
for num,eachtype in enumerate([options.selected_sample_N_removed,options.background_sample_N_removed]):
for file_num,filename in enumerate(eachtype):
#filename=filename.split("/")[-1].split(".fastq")[0]
if options.selected_ended=="PE" and options.background_ended=="PE":
if file_num%2==0:continue
cmd="STAR "
cmd+=" --runThreadN "+str(options.CPU)+" "
cmd+=" --genomeDir "+options.star_genome_index
cmd+=" --genomeLoad LoadAndKeep "
if num==0:
if options.selected_ended=="SE" and options.background_ended=="SE":
cmd+=" --readFilesIn "+options.selected_sample_fusion_reads[file_num]
else:
cmd+=" --readFilesIn "+options.selected_sample_fusion_reads[file_num-1]+" "+options.selected_sample_fusion_reads[file_num]
cmd+=" --outFileNamePrefix "+options.selected_sample_STAR_prefix_round2[file_num]
else:
if options.selected_ended=="SE" and options.background_ended=="SE":
cmd+=" --readFilesIn "+options.background_sample_fusion_reads[file_num]
else:
cmd+=" --readFilesIn "+options.background_sample_fusion_reads[file_num-1]+" "+options.background_sample_fusion_reads[file_num]
cmd+=" --outFileNamePrefix "+options.background_sample_STAR_prefix_round2[file_num]
cmd+=" --outSAMtype SAM "
#cmd+=" --outReadsUnmapped Fastx "
cmd+=" --outFilterMultimapNmax 500 "
cmd+=" --outFilterScoreMinOverLread 0.3 --outFilterMatchNminOverLread 0.3 "
cmd+=" --alignIntronMax 10000 "
#cmd+=" --quantMode TranscriptomeSAM "
#cmd+=" --quantTranscriptomeBan Singleend "
if num==0:
cmd+=" > "+options.selected_sample_STAR_round2_output[file_num]
cmd+=" 2> "+options.selected_sample_STAR_round2_error[file_num]
else:
cmd+=" > "+options.background_sample_STAR_round2_output[file_num]
cmd+=" 2> "+options.background_sample_STAR_round2_error[file_num]
os.system(cmd)
#print(cmd)
if num==0:
cmd="rm "+options.selected_sample_STAR_prefix_round2[file_num]+"Log.out "
cmd+=options.selected_sample_STAR_prefix_round2[file_num]+"Log.progress.out "
cmd+=options.selected_sample_STAR_prefix_round2[file_num]+"SJ.out.tab "
cmd+=options.selected_sample_STAR_round2_output[file_num]
else:
cmd="rm "+options.background_sample_STAR_prefix_round2[file_num]+"Log.out "
cmd+=options.background_sample_STAR_prefix_round2[file_num]+"Log.progress.out "
cmd+=options.background_sample_STAR_prefix_round2[file_num]+"SJ.out.tab "
cmd+=options.background_sample_STAR_round2_output[file_num]
os.system(cmd)
if num==0 and os.stat(options.selected_sample_STAR_round2_error[file_num]).st_size == 0:
cmd="rm "+options.selected_sample_STAR_round2_error[file_num]
elif num==1 and os.stat(options.background_sample_STAR_round2_error[file_num]).st_size == 0:
cmd="rm "+options.background_sample_STAR_round2_error[file_num]
os.system(cmd)
#logger.info("STAR round2 mapping for "+filename+" completed")
"""if num==0:
cmd="cp "+options.selected_sample_STAR_transcriptome_bamfilename_round2[file_num]+" "+options.selected_sample_STAR_transcriptome_bamfilename_round2_fusion_reads[file_num]
os.system(cmd)
else:
cmd="cp "+options.background_sample_STAR_transcriptome_bamfilename_round2[file_num]+" "+options.background_sample_STAR_transcriptome_bamfilename_round2_fusion_reads[file_num]
os.system(cmd)"""
cmd="STAR "
cmd+=" --genomeLoad Remove "
cmd+=" --genomeDir "+options.star_genome_index
os.system(cmd)
# Mapping fusion reads to transcriptome - Need to repeat this step since STAR is buggy
for num,eachtype in enumerate([options.selected_sample_N_removed,options.background_sample_N_removed]):
for file_num,filename in enumerate(eachtype):
#filename=filename.split("/")[-1].split(".fastq")[0]
if options.selected_ended=="PE" and options.background_ended=="PE":
if file_num%2==0:continue
cmd="STAR "
cmd+=" --runThreadN "+str(options.CPU)+" "
#cmd+=" --genomeDir "+options.star_genome_index
cmd+=" --genomeDir "+options.transcriptome_index
cmd+=" --genomeLoad LoadAndKeep "
if num==0:
if options.selected_ended=="SE" and options.background_ended=="SE":
cmd+=" --readFilesIn "+options.selected_sample_fusion_reads[file_num]
else:
cmd+=" --readFilesIn "+options.selected_sample_fusion_reads[file_num-1]+" "+options.selected_sample_fusion_reads[file_num]
cmd+=" --outFileNamePrefix "+options.selected_sample_STAR_prefix_round2[file_num]+"_transcriptome_"
else:
if options.selected_ended=="SE" and options.background_ended=="SE":
cmd+=" --readFilesIn "+options.background_sample_fusion_reads[file_num]
else:
cmd+=" --readFilesIn "+options.background_sample_fusion_reads[file_num-1]+" "+options.background_sample_fusion_reads[file_num]
cmd+=" --outFileNamePrefix "+options.background_sample_STAR_prefix_round2[file_num]+"_transcriptome_"
cmd+=" --outSAMtype SAM "
#cmd+=" --outReadsUnmapped Fastx "
cmd+=" --outFilterMultimapNmax 500 "
cmd+=" --seedPerReadNmax 5000 "
cmd+=" --seedPerWindowNmax 100 "
cmd+=" --outFilterScoreMinOverLread 0.8 --outFilterMatchNminOverLread 0.85 "
cmd+=" --alignIntronMax 10000 "
#cmd+=" --quantMode TranscriptomeSAM "
#cmd+=" --quantTranscriptomeBan Singleend "
if num==0:
cmd+=" > "+options.selected_sample_STAR_round2_output[file_num]
cmd+=" 2> "+options.selected_sample_STAR_round2_error[file_num]
else:
cmd+=" > "+options.background_sample_STAR_round2_output[file_num]
cmd+=" 2> "+options.background_sample_STAR_round2_error[file_num]
#print(cmd)
with logging_mutex:
logger_proxy.info(f"Running command {cmd}")
os.system(cmd)
#print(cmd)
#continue
cmd="samtools view -bSh "
if num==0:
cmd+=" "+options.selected_sample_STAR_prefix_round2[file_num]+"_transcriptome_Aligned.out.sam "
cmd+=" > "+options.selected_sample_STAR_prefix_round2[file_num]+"Aligned.toTranscriptome.out.bam"
else:
cmd+=" "+options.background_sample_STAR_prefix_round2[file_num]+"_transcriptome_Aligned.out.sam "
cmd+=" > "+options.background_sample_STAR_prefix_round2[file_num]+"Aligned.toTranscriptome.out.bam"
os.system(cmd)
#print(cmd)
if num==0:
cmd="cp "+options.selected_sample_STAR_transcriptome_bamfilename_round2[file_num]+" "
cmd+=options.selected_sample_STAR_transcriptome_bamfilename_round2_fusion_reads[file_num]
#print(cmd)
os.system(cmd)
else:
cmd="cp "+options.background_sample_STAR_transcriptome_bamfilename_round2[file_num]+" "
cmd+=options.background_sample_STAR_transcriptome_bamfilename_round2_fusion_reads[file_num]
#print(cmd)
os.system(cmd)
#logger.info("STAR round2 mapping for "+filename+" completed")
cmd="STAR "
cmd+=" --genomeLoad Remove "
cmd+=" --genomeDir "+options.transcriptome_index
os.system(cmd)
```

avg_line_length: 53.580357; max_line_length: 190; alphanum_fraction: 0.614564; effective: 0; hits: 7

| signal | value (`qsc_*_quality_signal`) | flag (`qsc_*`) |
|---|---|---|
| code_num_words | 1,282 | 0 |
| code_num_chars | 12,002 | 0 |
| code_mean_word_length | 5.450078 | 0 |
| code_frac_words_unique | 0.102184 | null |
| code_frac_chars_top_2grams | 0.071132 | 0 |
| code_frac_chars_top_3grams | 0.10219 | 0 |
| code_frac_chars_top_4grams | 0.089452 | 0 |
| code_frac_chars_dupe_5grams | 0.879061 | 1 |
| code_frac_chars_dupe_6grams | 0.855732 | 1 |
| code_frac_chars_dupe_7grams | 0.850293 | 1 |
| code_frac_chars_dupe_8grams | 0.832403 | 1 |
| code_frac_chars_dupe_9grams | 0.809074 | 1 |
| code_frac_chars_dupe_10grams | 0.750394 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.017542 | 0 |
| code_frac_chars_whitespace | 0.273288 | 0 |
| code_size_file_byte | 12,002 | 0 |
| code_num_lines | 223 | 0 |
| code_num_chars_line_max | 191 | 0 |
| code_num_chars_line_mean | 53.820628 | 0 |
| code_frac_chars_alphabet | 0.783536 | 0 |
| code_frac_chars_comments | 0.097567 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0.652439 | 0 |
| code_cate_autogen | 1 | 1 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0.164075 | 0 |
| code_frac_chars_long_word_length | 0.039852 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0.012195 | 0 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.006098 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.018293 | 0 |
| codepython_frac_lines_print | 0 | 0 |
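The aligner above assembles each STAR command by string concatenation and shells out via `os.system`, capturing output and errors through `>`/`2>` redirection. An equivalent sketch of one round-1 invocation using `subprocess` with explicit file handles; every path below is a placeholder, while the flags are copied from the script itself:

```python
# Sketch only: rebuilds one round-1 STAR call from alignReadsWithSTAR.py with
# subprocess instead of os.system. All file and directory names are placeholders.
import subprocess

cmd = [
    "STAR",
    "--runThreadN", "8",                     # placeholder for options.CPU
    "--genomeDir", "star_index/",            # placeholder for options.star_genome_index
    "--genomeLoad", "LoadAndKeep",
    "--readFilesIn", "sample_R1.fastq", "sample_R2.fastq",
    "--outFileNamePrefix", "sample_round1_",
    "--outSAMtype", "SAM",
    "--outFilterMultimapNmax", "500",
    "--limitOutSAMoneReadBytes", "1000000",
    "--outFilterScoreMinOverLread", "0.30",
    "--outFilterMatchNminOverLread", "0.30",
    "--alignIntronMax", "10000",
    "--quantMode", "TranscriptomeSAM",
    "--quantTranscriptomeBan", "Singleend",
]
with open("sample_round1.out", "w") as out, open("sample_round1.err", "w") as err:
    subprocess.run(cmd, stdout=out, stderr=err, check=True)
```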
**Row 4**

- hexsha: `29a63807ce992110a0e9099036cabe7afc1883bd`; size: 6,537; ext: py; lang: Python
- max_stars: `test/test-cert/test-cert.py` in `clayne/syringe-1` @ `4a431aa65c371a2018fca95145a3952ba802a609`, licenses ["BSD-2-Clause"], count 25, events 2015-04-14T21:53:46.000Z to 2022-03-30T19:15:24.000Z
- max_issues: same path, repo, and head; licenses ["BSD-2-Clause"], count 5, events 2020-03-23T20:19:59.000Z to 2021-05-24T19:38:31.000Z
- max_forks: same path, repo, and head; licenses ["BSD-2-Clause"], count 7, events 2015-07-31T13:26:37.000Z to 2021-03-05T19:35:37.000Z

content:

```python
import ptypes,pecoff
from ptypes import *
ptypes.setsource(ptypes.prov.file('c:/windows/sysnative/ntdll.dll', 'r'))
z = pecoff.Executable.File()
z=z.l
a = z['next']['header']['datadirectory'][4]['Address'].d.l
a = z['next']['header']['certificate']
#print a['bCertificate'].hexdump()
c = a[0]['bCertificate']
file('./ntdll.cert.pkcs7','wb').write(c.serialize())
print c.hexdump(lines=1)
import ber; reload(ber)
d = c.cast(ber.Record, recurse={'byteorder':ptypes.config.byteorder.bigendian})
print d['value']
print d['value'][0]
print d['value'][1]['Value']
e = d['value'][1]['Value'].cast(ber.Record)
print e['Value'][0]
print e['Value'][1]['Value'][0]
print e['Value'][2]['Value'][0]
print e['Value'][2]['Value'][1]['Value']
f = e['Value'][2]['Value'][1]['Value'].cast(ber.Record)
print f['Value'][0]['Value'][0]
print f['Value'][0]['Value'][1]['Value'][0]
print f['Value'][0]['Value'][1]['Value'][1]['Value'].cast(ber.Record)['Value'].cast(ber.Record)['Value']
print f['Value'][1]['Value'][0]
print f['Value'][1]['Value'][0]['Value'][0]
print f['Value'][1]['Value'][0]['Value'][1]
print f['Value'][1]['Value'][1]['Value']
print e['Value'][3]['Value']
g = e['Value'][3]['Value'].cast(ber.Record)
print g['Value'][0]['Value'][0]
print g['Value'][0]['Value'][1]
print g['Value'][0]['Value'][2]['Value'][0]
print g['Value'][0]['Value'][2]['Value'][1]
print g['Value'][0]['Value'][3]['Value'][0]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][3]['Value'][0]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][3]['Value'][1]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][3]['Value'][1]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][3]['Value'][2]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][3]['Value'][2]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][3]['Value'][3]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][3]['Value'][3]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][3]['Value'][4]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][3]['Value'][4]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][4]['Value'][0]
print g['Value'][0]['Value'][4]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][0]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][0]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][1]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][1]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][2]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][2]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][3]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][3]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][4]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][4]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][5]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][5]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][5]['Value'][6]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][5]['Value'][6]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][6]['Value'][0]['Value'][0]
print g['Value'][0]['Value'][6]['Value'][0]['Value'][1]
print g['Value'][0]['Value'][6]['Value'][1]['Value'].cast(ber.Record)['Value'].cast(ber.Record)
print g['Value'][0]['Value'][7]['Value']
print g['Value'][1]['Value'][0]
print g['Value'][1]['Value'][1]
print g['Value'][2]['Value'].cast(ber.Record)
print e['Value'][4]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][0]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][0]['Value'][0]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][2]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][2]['Value'][0]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][3]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][3]['Value'][0]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][4]['Value'][0]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][0]['Value'][4]['Value'][0]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][1]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][2]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][2]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][3]['Value'].cast(ber.Record)['Value'][0]
print e['Value'][4]['Value'][0]['Value'][3]['Value'].cast(ber.Record)['Value'][1]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][4]['Value'][0]
print e['Value'][4]['Value'][0]['Value'][4]['Value'][1]
print e['Value'][4]['Value'][0]['Value'][5]
print e['Value'][4]['Value'][0]['Value'][6]['Value']
h = e['Value'][4]['Value'][0]['Value'][6]['Value'].cast(ber.Record)
print h['Value'][0]
print h['Value'][1]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][0]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][0]['Value'][0]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][2]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][2]['Value'][0]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][3]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][3]['Value'][0]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][4]['Value'][0]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][0]['Value'][4]['Value'][0]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][1]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][2]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][2]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][3]['Value'].cast(ber.Record)['Value'][0]
print h['Value'][1]['Value'][0]['Value'][3]['Value'].cast(ber.Record)['Value'][1]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][4]['Value'][0]
print h['Value'][1]['Value'][0]['Value'][4]['Value'][1]
print h['Value'][1]['Value'][0]['Value'][5]
```

avg_line_length: 43.58; max_line_length: 104; alphanum_fraction: 0.550099; effective: 0; hits: 11

| signal | value (`qsc_*_quality_signal`) | flag (`qsc_*`) |
|---|---|---|
| code_num_words | 1,132 | 0 |
| code_num_chars | 6,537 | 0 |
| code_mean_word_length | 3.176678 | 0 |
| code_frac_words_unique | 0.04682 | null |
| code_frac_chars_top_2grams | 0.332036 | 1 |
| code_frac_chars_top_3grams | 0.46802 | 1 |
| code_frac_chars_top_4grams | 0.183537 | 1 |
| code_frac_chars_dupe_5grams | 0.891268 | 1 |
| code_frac_chars_dupe_6grams | 0.857341 | 1 |
| code_frac_chars_dupe_7grams | 0.850389 | 1 |
| code_frac_chars_dupe_8grams | 0.792547 | 1 |
| code_frac_chars_dupe_9grams | 0.779199 | 1 |
| code_frac_chars_dupe_10grams | 0.672692 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.064933 | 0 |
| code_frac_chars_whitespace | 0.04115 | 0 |
| code_size_file_byte | 6,537 | 0 |
| code_num_lines | 149 | 0 |
| code_num_chars_line_max | 105 | 0 |
| code_num_chars_line_mean | 43.872483 | 0 |
| code_frac_chars_alphabet | 0.508775 | 0 |
| code_frac_chars_comments | 0.005048 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0 | 0 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0.344918 | 0 |
| code_frac_chars_long_word_length | 0.004613 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 0 | 1 |
| codepython_frac_lines_func_ratio | null | 0 |
| codepython_cate_var_zero | null | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.027778 | 0 |
| codepython_frac_lines_simplefunc | null | 0 |
| codepython_score_lines_no_logic | null | 0 |
| codepython_frac_lines_print | 0.861111 | 1 |
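`test-cert.py` above (Python 2, as the bare `print` statements and the failed `codepython_cate_ast` signal indicate) digs through a PKCS#7 Authenticode blob by casting byte strings to `ber.Record` and indexing nested `'Value'` entries. For orientation, a simplified sketch of the BER tag-length-value framing such a parser decodes; long-form tags and indefinite lengths are deliberately omitted:

```python
# Simplified BER TLV reader, for orientation only: single-byte tags and
# definite lengths, enough to see the nesting that the ber.Record casts walk.
# Real BER (long-form tags, indefinite lengths) needs more cases.
def read_tlv(buf: bytes, offset: int = 0):
    tag = buf[offset]
    length = buf[offset + 1]
    header = 2
    if length & 0x80:  # long-form length: next (length & 0x7f) bytes hold it
        n = length & 0x7F
        length = int.from_bytes(buf[offset + 2:offset + 2 + n], "big")
        header = 2 + n
    value = buf[offset + header:offset + header + length]
    constructed = bool(tag & 0x20)  # constructed values contain nested TLVs
    return tag, value, constructed, offset + header + length

# Example: a SEQUENCE (0x30) containing one INTEGER (0x02) with value 5.
blob = bytes([0x30, 0x03, 0x02, 0x01, 0x05])
tag, value, constructed, _ = read_tlv(blob)
assert constructed and tag == 0x30
inner_tag, inner_val, _, _ = read_tlv(value)
assert inner_tag == 0x02 and inner_val == b"\x05"
```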
**Row 5**

- hexsha: `29b4a590f01b586c23ee8c1336ad763d2cdbd3a9`; size: 284; ext: py; lang: Python
- max_stars: `coursecake/scrapers/ucsc/constants.py` in `nananananate/CourseScraper` @ `4ef40cb2bd5f42177c8596fd18ead66b4a9d2379`, licenses ["MIT"], count 21, events 2020-07-18T01:17:53.000Z to 2021-09-11T08:28:59.000Z
- max_issues: `coursecake/scrapers/ucsc/constants.py` in `AlfiyaZi/CourseCake` @ `be2af4d0025f2963ed83001004f87df2c6d8b71d`, licenses ["MIT"], count 4, events 2020-10-03T00:22:20.000Z to 2021-03-31T19:54:33.000Z
- max_forks: same path, repo, and head as max_issues; licenses ["MIT"], count 4, events 2020-09-05T05:17:26.000Z to 2020-10-16T05:49:34.000Z

content:

```python
TERMS_API_URL = "https://andromeda.miragespace.net/slugsurvival/data/fetch/terms.json"
CLASSES_API_BASE_URL = (
"https://andromeda.miragespace.net/slugsurvival/data/fetch/terms/"
)
COURSE_API_BASE_URL = (
"https://andromeda.miragespace.net/slugsurvival/data/fetch/courses/"
)
```

avg_line_length: 35.5; max_line_length: 86; alphanum_fraction: 0.774648; effective: 0; hits: 9

| signal | value (`qsc_*_quality_signal`) | flag (`qsc_*`) |
|---|---|---|
| code_num_words | 36 | 0 |
| code_num_chars | 284 | 0 |
| code_mean_word_length | 5.888889 | 0 |
| code_frac_words_unique | 0.416667 | null |
| code_frac_chars_top_2grams | 0.113208 | 0 |
| code_frac_chars_top_3grams | 0.240566 | 1 |
| code_frac_chars_top_4grams | 0.396226 | 1 |
| code_frac_chars_dupe_5grams | 0.849057 | 1 |
| code_frac_chars_dupe_6grams | 0.849057 | 1 |
| code_frac_chars_dupe_7grams | 0.849057 | 1 |
| code_frac_chars_dupe_8grams | 0.849057 | 1 |
| code_frac_chars_dupe_9grams | 0.849057 | 1 |
| code_frac_chars_dupe_10grams | 0.556604 | 0 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0 | 0 |
| code_frac_chars_whitespace | 0.073944 | 0 |
| code_size_file_byte | 284 | 0 |
| code_num_lines | 7 | 1 |
| code_num_chars_line_max | 87 | 0 |
| code_num_chars_line_mean | 40.571429 | 0 |
| code_frac_chars_alphabet | 0.806084 | 0 |
| code_frac_chars_comments | 0 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0 | 0 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0.697183 | 1 |
| code_frac_chars_long_word_length | 0 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0 | 0 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0 | 0 |
| codepython_frac_lines_print | 0 | 0 |
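The row above is a three-constant module of scraper endpoints. A hedged consumption sketch; it assumes the miragespace.net endpoints still serve JSON and that `requests` is installed, neither of which the file itself guarantees:

```python
# Hedged usage sketch for the constants above; assumes the miragespace.net
# endpoint is still live and returns JSON, which is not guaranteed.
import requests

from coursecake.scrapers.ucsc.constants import TERMS_API_URL

resp = requests.get(TERMS_API_URL, timeout=10)
resp.raise_for_status()
terms = resp.json()
print(len(terms), "terms returned")
```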
**Row 6**

- hexsha: `d99ccdc1913511376644c946206098877d3822bc`; size: 37,408; ext: py; lang: Python
- max_stars: `pytests_stale/cbas/cbas_secondary_indexes.py` in `sumedhpb/TAF` @ `fc6f4cb8dc0b8234393f2e52a7b4a1aa723d9449`, licenses ["Apache-2.0"], count 9, events 2019-02-19T05:55:00.000Z to 2022-01-20T10:37:28.000Z
- max_issues: same path, repo, and head; licenses ["Apache-2.0"], count 2, events 2019-02-19T07:28:54.000Z to 2019-06-18T11:22:29.000Z
- max_forks: same path, repo, and head; licenses ["Apache-2.0"], count 155, events 2018-11-13T14:57:07.000Z to 2022-03-28T11:53:22.000Z

content:

```python
import threading
import random
from TestInput import TestInputSingleton
from cbas.cbas_base import CBASBaseTest
class CBASSecondaryIndexes(CBASBaseTest):
def setUp(self):
self.input = TestInputSingleton.input
if "default_bucket" not in self.input.test_params:
self.input.test_params.update({"default_bucket": False})
super(CBASSecondaryIndexes, self).setUp()
self.load_sample_buckets(servers=[self.cluster.master],
bucketName=self.cb_bucket_name,
total_items=self.beer_sample_docs_count)
if "add_all_cbas_nodes" in self.input.test_params and \
self.input.test_params["add_all_cbas_nodes"] and len(
self.cluster.cbas_nodes) > 1:
self.cluster_util.add_all_nodes_then_rebalance(
self.cluster, self.cluster.cbas_nodes)
self.cbas_util.createConn(self.cb_bucket_name)
# Create dataset on the CBAS bucket
self.cbas_util.create_dataset_on_bucket(
cbas_bucket_name=self.cb_bucket_name,
cbas_dataset_name=self.cbas_dataset_name,
compress_dataset=self.compress_dataset)
def tearDown(self):
super(CBASSecondaryIndexes, self).tearDown()
def verify_index_used(self, statement, index_used=False, index_name=None):
statement = 'EXPLAIN %s'%statement
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
if status == 'success':
self.assertEquals(errors, None)
if index_used:
self.assertTrue("index-search" in str(results))
self.assertFalse("data-scan" in str(results))
self.log.info("INDEX-SEARCH is found in EXPLAIN hence indexed data will be scanned to serve %s"%statement)
if index_name:
self.assertTrue(index_name in str(results))
else:
self.assertTrue("data-scan" in str(results))
self.assertFalse("index-search" in str(results))
self.log.info("DATA-SCAN is found in EXPLAIN hence index is not used to serve %s"%statement)
def test_create_index(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index on various fields as passed in the parameters
3. Validate if the index is created and the index definition has the expected fields
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
def test_create_index_without_if_not_exists(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Again create an index with the same name without using IF_NOT_EXISTS clause
3. Validate if the error msg is as expected
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
# Create another index with same name
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(self.cbas_util.validate_error_in_response(status, errors, self.expected_error),
"Error msg not matching expected error msg")
def test_create_index_with_if_not_exists(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Again create an index with the same name using IF_NOT_EXISTS clause
3. Validate if that there is no error
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
# Create another index with same name
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
def test_create_index_with_if_not_exists_different_fields(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Again create an index with the same name but with different fields using IF_NOT_EXISTS clause
4. Validate there is no error
5. The index definition of should not change.
Author : Mihir Kamdar
Created date : 8/1/2017
'''
index_field1 = "city:string"
index_field2 = "abv:bigint"
# Create Index
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_field1)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, [index_field1],
self.cbas_dataset_name)[0])
# Create another index with same name
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_field2)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
# The index definition should be based on the older field, it should not change
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, [index_field1],
self.cbas_dataset_name)[0])
def test_multiple_composite_index_with_overlapping_fields(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Again create a composite index
4. Now create another composite index with some overlapping fields
5. Both the indexes should get created successfully
Author : Mihir Kamdar
Created date : 8/1/2017
'''
index_fields1 = ["city:string", "abv:bigint"]
index_fields2 = ["abv:bigint", "geo.lat:double"]
# Create Index
index_fields = ""
for index_field in index_fields1:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
self.index_name + "1", self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name + "1", index_fields1,
self.cbas_dataset_name)[0])
# Create another composite index with overlapping fields
index_fields = ""
for index_field in index_fields2:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
self.index_name + "2", self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name + "2", index_fields2,
self.cbas_dataset_name)[0])
statement = 'SELECT VALUE v FROM '+ self.cbas_dataset_name + ' v WHERE v.geo.lat > 1 AND v.abv > 2'
self.verify_index_used(statement, True, self.index_name)
def test_create_index_non_empty_dataset(self):
'''
Steps :
1. Create bucket in CBAS, create dataset, connect to the bucket, disconnect from bucket
2. Create index
3. Validate the index is created correctly
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Connect to Bucket
result = self.cbas_util.connect_to_bucket(cbas_bucket_name=
self.cbas_bucket_name,
cb_bucket_password=self.cb_bucket_password)
# Allow ingestion to complete
self.sleep(30)
# Disconnect from bucket
result = self.cbas_util.disconnect_from_bucket(cbas_bucket_name=
self.cbas_bucket_name)
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
def test_create_index_with_bucket_connected(self):
'''
Steps :
1. Create bucket in CBAS, create dataset, connect to the bucket
2. Create index
3. Create index should fail.
4. Validate that the error msg is as expected
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Connect to Bucket
self.cbas_util.connect_to_bucket(cbas_bucket_name=
self.cbas_bucket_name,
cb_bucket_password=self.cb_bucket_password)
# Allow ingestion to complete
self.sleep(30)
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(self.cbas_util.validate_error_in_response(status, errors, self.expected_error))
def test_drop_index(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Validate the index is created correctly
4. Drop index
5. Validate that the index is dropped
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
drop_idx_statement = "drop index {0}.{1};".format(
self.cbas_dataset_name, self.index_name)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
drop_idx_statement)
self.assertTrue(status == "success", "Drop Index query failed")
self.assertFalse(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
def test_drop_non_existing_index(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Drop a non-existing index without using IF_EXISTS clause
3. Validate that the error msg is as expected
4. Drop a non-existing index using IF_EXISTS clause
5. Validate there is no error
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Drop non-existing index without IF EXISTS
drop_idx_statement = "drop index {0}.{1};".format(
self.cbas_dataset_name, self.index_name)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
drop_idx_statement)
self.assertTrue(self.cbas_util.validate_error_in_response(status, errors, self.expected_error))
# Drop non-existing index with IF EXISTS
drop_idx_statement = "drop index {0}.{1} IF EXISTS;".format(
self.cbas_dataset_name, self.index_name)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
drop_idx_statement)
self.assertEqual(status, "success",
"Drop non existent index with IF EXISTS fails")
def test_drop_dataset_drops_index(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Validate the index is created correctly
4. Drop dataset
5. Validate that the index is also dropped
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
# Drop dataset
self.cbas_util.drop_dataset(self.cbas_dataset_name)
# Check that the index no longer exists
self.assertFalse(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
def test_drop_non_empty_index(self):
'''
Steps :
1. Create bucket in CBAS, create dataset
2. Create index
3. Validate the index is created correctly
4. Connect dataset, disconnect dataset
5. Drop index
6. Validate that the index is dropped
Author : Mihir Kamdar
Created date : 8/1/2017
'''
# Create Index
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
# Connect to Bucket
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
# Allow ingestion to complete
self.sleep(30)
# Disconnect from bucket
        self.cbas_util.disconnect_from_bucket(
            cbas_bucket_name=self.cbas_bucket_name)
drop_idx_statement = "drop index {0}.{1};".format(
self.cbas_dataset_name, self.index_name)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
drop_idx_statement)
self.assertTrue(status == "success", "Drop Index query failed")
self.assertFalse(
self.cbas_util.verify_index_created(self.index_name,
self.index_fields,
self.cbas_dataset_name)[0])
def _direct_client(self, server, bucket, timeout=30):
# CREATE SDK CLIENT
if self.sdk_client_type == "java":
try:
from sdk_client3 import SDKClient
scheme = "couchbase"
host = self.cluster.master.ip
if self.cluster.master.ip == "127.0.0.1":
scheme = "http"
host = "{0}:{1}".format(self.cluster.master.ip, self.cluster.master.port)
return SDKClient(scheme=scheme, hosts=[host], bucket=bucket,
password=self.cluster.master.rest_password)
except Exception, ex:
self.log.error("cannot load sdk client due to error {0}"
.format(str(ex)))
# USE MC BIN CLIENT WHEN NOT USING SDK CLIENT
return self.direct_mc_bin_client(server, bucket, timeout=timeout)
def test_index_population(self):
        '''
        Steps :
        1. Upsert a document whose field value may or may not fit the
           type of the indexed field
        2. Create an index on the typed dataset field
        3. Connect the bucket and query by a fitting value and by the
           non-fitting value
        4. Validate the counts and whether the index was used
        '''
        # Read test parameters
search_by = self.input.param("search_by", '')
exp_number = self.input.param("exp_number", 0)
not_fit_value = self.input.param("not_fit_value", '')
expected_status = self.input.param("status", 'success')
binary = self.input.param("binary", False)
index_used = self.input.param("index_used", False)
if ";" in str(not_fit_value):
not_fit_value = not_fit_value.split(';')
testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
self.client = self._direct_client(self.cluster.master, self.cb_bucket_name)
k = 'test_index_population'
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
if binary:
self.client.upsert('utf16_doc', not_fit_value.encode('utf16'))
        else:
            field = index_fields.split(":")[0]
            if "." in field:
                parts = field.split(".")
                self.client.upsert(k, {parts[0]: {parts[1]: not_fit_value}})
            else:
                self.client.upsert(k, {field: not_fit_value})
self.client.close()
if index_fields.split(":")[1] == "string" and isinstance(not_fit_value,str) or \
index_fields.split(":")[1] == "double" and isinstance(not_fit_value,(float,int)) or \
index_fields.split(":")[1] == "bigint" and isinstance(not_fit_value,(float,int)):
index_used=True
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
self.sleep(20)
if isinstance(search_by, basestring):
statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name, index_fields.split(":")[0], search_by)
else:
statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
index_fields.split(":")[0], search_by)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': exp_number}])
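        # The count above used the well-typed search_by value; the query
        # below probes not_fit_value, whose type may not match the indexed
        # field's declared CBAS type.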
if isinstance(not_fit_value,str):
statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
index_fields.split(":")[0], not_fit_value)
else:
statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
index_fields.split(":")[0], not_fit_value)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, expected_status)
if status == 'success':
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': 1}])
self.log.info("Verify whether statement %s used index or not. Indexed: %s"%(statement,index_fields))
self.verify_index_used(statement, index_used, self.index_name)
# https://issues.couchbase.com/browse/MB-25646
# https://issues.couchbase.com/browse/MB-25657
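    # A minimal sketch, assuming CBAS accepts EXPLAIN and that the returned
    # plan names the indexes it uses, of what a check like verify_index_used
    # could look like (hypothetical helper, not necessarily the suite's own):
    #
    #     def _index_in_plan(self, statement, index_name):
    #         status, _, _, results, _ = \
    #             self.cbas_util.execute_statement_on_cbas_util(
    #                 "EXPLAIN " + statement)
    #         return status == "success" and index_name in str(results)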
def test_index_population_thread(self):
to_verify = 0
index_used = self.input.param("index_used", False)
def update_data(client, index_fields):
for _ in xrange(100):
if index_fields.split(":")[-1] == 'double':
not_fit_value = random.choice([False, "sdfs", 11111])
elif index_fields.split(":")[-1] == 'string':
not_fit_value = random.choice([False, 11111, 36.6])
elif index_fields.split(":")[-1] == 'bigint':
not_fit_value = random.choice([False, "sdfs", 36.6])
perc = random.randrange(0, 100)
                if perc > 75:
                    # ~25% of updates: keep the indexed field but give it a
                    # value whose type does not match the index
                    # client.upsert('utf16_doc', str(not_fit_value).encode('utf16'), format=FMT_BYTES)
                    client.upsert(k, {index_fields.split(":")[0]: not_fit_value})
                else:
                    # ~75% of updates: rename the field, effectively removing
                    # the indexed field from the document
                    client.upsert(k, {index_fields.split(":")[0] + "_NEW_FIELD": not_fit_value})
# Create Index
search_by = self.input.param("search_by", '')
exp_number = self.input.param("exp_number", 0)
not_fit_value = self.input.param("not_fit_value", '')
expected_status = self.input.param("status", 'success')
if ";" in not_fit_value:
not_fit_value = not_fit_value.split(';')
testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
self.client = self._direct_client(self.cluster.master, self.cb_bucket_name)
k = 'test_index_population_thread'
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
self.sleep(10)
d = threading.Thread(name='daemon', target=update_data, args=(self.client, index_fields,))
d.setDaemon(True)
d.start()
for i in xrange(10):
if isinstance(search_by, basestring):
statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
index_fields.split(":")[0], search_by)
else:
statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
index_fields.split(":")[0], search_by)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': exp_number}])
if isinstance(not_fit_value,str):
statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
index_fields.split(":")[0], not_fit_value)
else:
statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
index_fields.split(":")[0], not_fit_value)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, expected_status)
if status == 'success':
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': 0}])
self.log.info("Verify whether statement %s used index or not. Indexed: %s"%(statement,index_fields))
self.verify_index_used(statement, index_used, self.index_name)
self.client.close()
def test_index_population_where_statements(self):
exp_number = self.input.param("exp_number", 0)
where_statement = self.input.param("where_statement", '').replace('_EQ_', '=')
index_used = self.input.param("index_used", False)
testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
self.sleep(20)
statement = 'SELECT count(*) FROM `{0}` where {1};'.format(self.cbas_dataset_name, where_statement)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': exp_number}])
self.log.info("Verify whether statement %s used index or not. Indexed: %s"%(statement,index_fields))
self.verify_index_used(statement, index_used, self.index_name)
def test_index_population_joins(self):
exp_number = self.input.param("exp_number", 0)
self.index_name2 = self.input.param('index_name2', None)
self.index_fields2 = self.input.param('index_fields2', None)
if self.index_fields2:
self.index_fields2 = self.index_fields2.split("-")
statement = self.input.param("statement", '').replace('_EQ_', '=').replace('_COMMA_', ',')
testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)[0])
index_fields2 = ""
for index_field in self.index_fields2:
index_fields2 += index_field + ","
index_fields2 = index_fields2[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name2, self.cbas_dataset_name, index_fields2)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(
self.cbas_util.verify_index_created(self.index_name2, self.index_fields2,
self.cbas_dataset_name)[0])
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
self.sleep(20)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(len(results), exp_number)
# https://issues.couchbase.com/browse/MB-25695
def test_index_metadata(self):
self.buckets = [Bucket(name="beer-sample")]
self.perform_doc_ops_in_all_cb_buckets("create", start_key=0, end_key=100000)
index_fields = ""
for index_field in self.index_fields:
index_fields += index_field + ","
index_fields = index_fields[:-1]
create_idx_statement = "create index {0} on {1}({2});".format(
self.index_name, self.cbas_dataset_name, index_fields)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)
self.cbas_util.wait_for_ingestion_complete([self.cbas_dataset_name], 107303)
statement = 'SELECT count(*) FROM `{0}`'.format(self.cbas_dataset_name)
_, result = self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)
self.assertEquals(result[0]['Index']['DatasetName'], self.cbas_dataset_name)
self.assertEquals(result[0]['Index']['DataverseName'], 'Default')
self.assertEquals(result[0]['Index']['IndexName'], self.index_name)
self.assertEquals(result[0]['Index']['IndexStructure'], 'BTREE')
self.assertEquals(result[0]['Index']['IsPrimary'], False)
self.assertEquals(result[0]['Index']['PendingOp'], 0)
self.assertEquals(result[0]['Index']['SearchKey'], [index_field.split(":")[:-1]])
self.assertEquals(result[0]['Index']['SearchKeyType'], index_field.split(":")[1:])
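        # Note: index_field here is whatever value was left in the loop
        # variable after building index_fields above, so these SearchKey and
        # SearchKeyType assertions implicitly assume a single-field index.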
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': 107303}])
        self.cbas_util.disconnect_from_bucket(
            cbas_bucket_name=self.cbas_bucket_name)
drop_idx_statement = "drop index {0}.{1};".format(self.cbas_dataset_name, self.index_name)
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
drop_idx_statement)
_, result = self.cbas_util.verify_index_created(self.index_name, self.index_fields,
self.cbas_dataset_name)
self.assertEquals(result, [])
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(
statement)
self.assertEquals(status, "success")
self.assertEquals(errors, None)
self.assertEquals(results, [{'$1': 107303}])
def test_index_on_nested_fields_same_object(self):
index_fields = ["geo.lon:double", "geo.lat:double"]
create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(self.index_name, self.cbas_dataset_name, ",".join(index_fields))
status, metrics, errors, results, _ = self.cbas_util.execute_statement_on_cbas_util(create_idx_statement)
self.assertTrue(status == "success", "Create Index query failed")
self.assertTrue(self.cbas_util.verify_index_created(self.index_name, index_fields, self.cbas_dataset_name)[0])
statement = 'SELECT VALUE v FROM '+ self.cbas_dataset_name + ' v WHERE v.geo.lon > 1 AND v.geo.lat > 2'
self.verify_index_used(statement, True, self.index_name)
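# Illustrative addendum (not part of the original suite): the DDL-composition
# pattern the tests above rely on, shown standalone with hypothetical names.
if __name__ == "__main__":
    dataset, index_name = "ds_travel", "idx_city"
    fields = ["city:string", "geo.lat:double"]
    print("create index {0} on {1}({2});".format(
        index_name, dataset, ",".join(fields)))
    print("drop index {0}.{1} IF EXISTS;".format(dataset, index_name))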
[trailing quality-signal columns for the file above, in the header order: 46.125771 | 148 | 0.604149 | 4,395 | 37,408 | 4.866439 | 0.069625 | 0.056106 | 0.042641 | 0.055966 | 0.823639 | 0.783009 | 0.764775 | 0.749065 | 0.733355 | 0.72176 | 0 | 0.016542 | 0.293787 | 37,408 | 810 | 149 | 46.182716 | 0.793058 | 0.029486 | 0 | 0.700565 | 0 | 0 | 0.104205 | 0.001508 | 0 | 0 | 0 | 0 | 0.161959 | 0 | null | null | 0.024482 | 0.009416 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8]

[next record: test_cases_eval.py · repo mc2259/grit_doc_event_entity · hexsha d9a5aa35dc4bc260dd65fb23ddcc1311a5b870ed · head 42d10cc79a21b27338950c945cfb35bcb89dc542 · 6,770 bytes · py / Python · licenses ["MIT"] · stars 31 (2020-08-24T04:56:27.000Z → 2020-10-23T02:33:40.000Z) · issues 2 (2021-04-14T03:34:05.000Z → 2021-07-29T04:02:26.000Z) · forks 9 (2021-01-29T12:31:49.000Z → 2021-12-30T06:46:19.000Z)]
from eval import eval_ceaf
if __name__ == "__main__":
print("================= case 1 (in the paper) =================")
golds = {
"docid1": {"Target": [["Pilmai telephone company building", "telephone company building", "telephone company offices"], ["water pipes"], ["public telephone booth"]]},
}
preds = {
"docid1": {"Target": [["water pipes"], ["Pilmai telephone company building"], ["public telephone booth"], ["telephone company offices"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("================= case 2 (in the paper) =================")
golds = {
"docid1": {"Target": [["Pilmai telephone company building", "telephone company building", "telephone company offices"], ["water pipes"], ["public telephone booth"]]},
}
preds = {
"docid1": {"Target": [["Pilmai telephone company building"], ["water pipes"], ["public telephone booth"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("================= case 3 (in the paper) =================")
golds = {
"docid1": {"Target": [["Pilmai telephone company building", "telephone company building", "telephone company offices"], ["water pipes"], ["public telephone booth"]]},
}
preds = {
"docid1": {"Target": [["Pilmai telephone company building"], ["public telephone booth"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("\n\n================= case 4 =================")
golds = {
"docid1": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
}
preds = {
"docid1": {"PerpInd": [["m4"], ["m1"], ["m5"], ["m6"], ["m3"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("================= case 5 =================")
golds = {
"docid2": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
}
preds = {
"docid2": {"PerpInd": [["m1", "m2"], ["m4"], ["m5"], ["m6"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("================= case 6 =================")
golds = {
"docid3": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
}
preds = {
"docid3": {"PerpInd": [["m1", "m2", "m3", "m4"], ["m5"], ["m6"]]},
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
print("================= case 7 =================")
golds = {
"docid1": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
"docid2": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
"docid3": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]},
"docid4": {"PerpInd": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]],
"PerpOrg": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]],
"Target": [["m1", "m2", "m3"], ["m4"], ["m5"], ["m6"]]}
}
preds = {
"docid1": {"PerpInd": [["m4"], ["m1"], ["m5"], ["m6"], ["m3"]]},
"docid2": {"PerpInd": [["m1", "m2"], ["m4"], ["m5"], ["m6"]]},
"docid3": {"PerpInd": [["m1", "m2", "m3", "m4"], ["m5"], ["m6"]]},
"docid4": {"PerpInd": [["m4"], ["m1"], ["m5"], ["m6"], ["m3"]],
"PerpOrg": [["m1", "m2"], ["m4"], ["m5"], ["m6"]],
"Target": [["m1", "m2", "m3", "m4"], ["m5"], ["m6"]]}
}
results = eval_ceaf(preds, golds, docids=[])
print("golds", golds)
print("preds", preds)
print("phi_strict: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["strict"]["micro_avg"]["p"] * 100, results["strict"]["micro_avg"]["r"] * 100, results["strict"]["micro_avg"]["f1"] * 100))
print("phi_prop: P: {:.2f}%, R: {:.2f}%, F1: {:.2f}%".format(results["prop"]["micro_avg"]["p"] * 100, results["prop"]["micro_avg"]["r"] * 100, results["prop"]["micro_avg"]["f1"] * 100))
    print("")
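    # Sketch only (not part of eval.py): the per-case reporting above repeats
    # verbatim; a helper such as the following, defined before the cases,
    # would fold each block into report(eval_ceaf(preds, golds, docids=[])):
    #
    #     def report(results):
    #         for mode in ("strict", "prop"):
    #             m = results[mode]["micro_avg"]
    #             print("phi_{0}: P: {1:.2f}%, R: {2:.2f}%, F1: {3:.2f}%".format(
    #                 mode, m["p"] * 100, m["r"] * 100, m["f1"] * 100))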
[trailing quality-signal columns for the file above, in the header order: 53.307087 | 198 | 0.467356 | 756 | 6,770 | 4.089947 | 0.075397 | 0.108668 | 0.122251 | 0.142626 | 0.964101 | 0.959573 | 0.954398 | 0.923351 | 0.893596 | 0.878072 | 0 | 0.060714 | 0.209306 | 6,770 | 126 | 199 | 53.730159 | 0.516906 | 0 | 0 | 0.68932 | 0 | 0 | 0.40946 | 0.003104 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009709 | 0 | 0.009709 | 0.407767 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7]

[next record: zerver/webhooks/beanstalk/tests.py · repo ajayns/zulip · hexsha d9b51124c5574aaad25072fe4cc39113d7cd5584 · head c5ff020246cd75d0650f93ef621b76bdc4bbb8aa · 9,798 bytes · py / Python · licenses ["Apache-2.0"] · stars null · issues null · forks 1 (2019-10-14T23:36:14.000Z → 2019-10-14T23:36:14.000Z)]
# -*- coding: utf-8 -*-
from mock import patch, MagicMock
from typing import Dict, Text
from zerver.lib.webhooks.git import COMMITS_LIMIT
from zerver.lib.test_classes import WebhookTestCase
class BeanstalkHookTests(WebhookTestCase):
STREAM_NAME = 'commits'
URL_TEMPLATE = u"/api/v1/external/beanstalk"
def test_git_single(self) -> None:
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 1 commit to branch master.
* add some stuff ([e50508d](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/e50508df))"""
self.send_and_test_stream_message('git_singlecommit', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_single_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 1 commit to branch master.
* add some stuff ([e50508d](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/e50508df))"""
self.send_and_test_stream_message('git_singlecommit', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_multiple_committers(self) -> None:
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 3 commits to branch master. Commits by Leo Franchi (2) and Tomasz Kolek (1).
* Added new file ([edf529c](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/edf529c7))
* Filled in new file with some stuff ([c2a191b](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/c2a191b9))
* More work to fix some bugs ([2009815](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/20098158))"""
self.send_and_test_stream_message('git_multiple_committers', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_multiple_committers_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 3 commits to branch master. Commits by Leo Franchi (2) and Tomasz Kolek (1).
* Added new file ([edf529c](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/edf529c7))
* Filled in new file with some stuff ([c2a191b](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/c2a191b9))
* More work to fix some bugs ([2009815](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/20098158))"""
self.send_and_test_stream_message('git_multiple_committers', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_multiple(self) -> None:
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 3 commits to branch master.
* Added new file ([edf529c](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/edf529c7))
* Filled in new file with some stuff ([c2a191b](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/c2a191b9))
* More work to fix some bugs ([2009815](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/20098158))"""
self.send_and_test_stream_message('git_multiple', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_multiple_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 3 commits to branch master.
* Added new file ([edf529c](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/edf529c7))
* Filled in new file with some stuff ([c2a191b](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/c2a191b9))
* More work to fix some bugs ([2009815](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/20098158))"""
self.send_and_test_stream_message('git_multiple', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_more_than_limit(self) -> None:
commits_info = "* add some stuff ([e50508d](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/e50508df))\n"
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 50 commits to branch master.
{}[and {} more commit(s)]""".format((commits_info * COMMITS_LIMIT), 50 - COMMITS_LIMIT)
self.send_and_test_stream_message('git_morethanlimitcommits', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_git_more_than_limit_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
commits_info = "* add some stuff ([e50508d](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/e50508df))\n"
expected_subject = "work-test / master"
expected_message = """Leo Franchi [pushed](http://lfranchi-svn.beanstalkapp.com/work-test) 50 commits to branch master.
{}[and {} more commit(s)]""".format((commits_info * COMMITS_LIMIT), 50 - COMMITS_LIMIT)
self.send_and_test_stream_message('git_morethanlimitcommits', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
@patch('zerver.webhooks.beanstalk.view.check_send_stream_message')
def test_git_single_filtered_by_branches_ignore(self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('git_singlecommit')
result = self.client_post(self.url, payload,
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.beanstalk.view.check_send_stream_message')
def test_git_multiple_committers_filtered_by_branches_ignore(
self, check_send_stream_message_mock):
# type: (MagicMock) -> None
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('git_multiple_committers')
result = self.client_post(self.url, payload,
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.beanstalk.view.check_send_stream_message')
def test_git_multiple_filtered_by_branches_ignore(
self, check_send_stream_message_mock):
# type: (MagicMock) -> None
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('git_multiple')
result = self.client_post(self.url, payload,
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.beanstalk.view.check_send_stream_message')
def test_git_more_than_limit_filtered_by_branches_ignore(
self, check_send_stream_message_mock):
# type: (MagicMock) -> None
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('git_morethanlimitcommits')
result = self.client_post(self.url, payload,
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
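    # In the four *_ignore tests above, the pushed branch ("master" in the
    # fixtures) is absent from the configured filter ("changes,development"),
    # so the view is expected to succeed without posting anything: the patched
    # check_send_stream_message must never be called.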
def test_svn_addremove(self) -> None:
expected_subject = "svn r3"
expected_message = """Leo Franchi pushed [revision 3](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/3):
> Removed a file and added another one!"""
self.send_and_test_stream_message('svn_addremove', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def test_svn_changefile(self) -> None:
expected_subject = "svn r2"
expected_message = """Leo Franchi pushed [revision 2](http://lfranchi-svn.beanstalkapp.com/work-test/changesets/2):
> Added some code"""
self.send_and_test_stream_message('svn_changefile', expected_subject, expected_message,
content_type=None,
**self.api_auth(self.TEST_USER_EMAIL))
def get_body(self, fixture_name: Text) -> Dict[str, Text]:
return {'payload': self.fixture_data('beanstalk', fixture_name)}
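# Sketch only (an assumption about the view's filtering, which is not shown
# in this file): a branches parameter like "master,development" plausibly
# gates messages with a membership test of this shape.
def _branch_allowed(pushed_branch, branches_param):
    # An empty/None filter means every branch is allowed.
    if not branches_param:
        return True
    return pushed_branch in branches_param.split(",")

assert _branch_allowed("master", "master,development")
assert not _branch_allowed("master", "changes,development")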
[trailing quality-signal columns for the file above, in the header order: 61.2375 | 175 | 0.671157 | 1,180 | 9,798 | 5.313559 | 0.109322 | 0.043381 | 0.062201 | 0.111962 | 0.921691 | 0.913397 | 0.900957 | 0.889155 | 0.872249 | 0.870175 | 0 | 0.024804 | 0.218208 | 9,798 | 159 | 176 | 61.622642 | 0.793734 | 0.010104 | 0 | 0.726563 | 0 | 0.203125 | 0.379926 | 0.040334 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.117188 | false | 0 | 0.03125 | 0.007813 | 0.179688 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8]

[next record: sdk/python/pulumi_azure_native/media/v20180701/_inputs.py · repo pulumi-bot/pulumi-azure-native · hexsha d9bd6a49412928e4ee354667f7d019074e11f6db · head f7b9490b5211544318e455e5cceafe47b628e12c · 273,017 bytes · py / Python · licenses ["Apache-2.0"] · stars 31 (2020-09-21T09:41:01.000Z → 2021-02-26T13:21:59.000Z) · issues 231 (2020-09-21T09:38:45.000Z → 2021-03-01T11:16:03.000Z) · forks 4 (2020-09-29T14:14:59.000Z → 2021-02-10T20:38:16.000Z)]
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = [
'AacAudioArgs',
'AbsoluteClipTimeArgs',
'AkamaiAccessControlArgs',
'AkamaiSignatureHeaderAuthenticationKeyArgs',
'AudioArgs',
'AudioAnalyzerPresetArgs',
'AudioOverlayArgs',
'BuiltInStandardEncoderPresetArgs',
'CbcsDrmConfigurationArgs',
'CencDrmConfigurationArgs',
'CommonEncryptionCbcsArgs',
'CommonEncryptionCencArgs',
'ContentKeyPolicyClearKeyConfigurationArgs',
'ContentKeyPolicyFairPlayConfigurationArgs',
'ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs',
'ContentKeyPolicyOpenRestrictionArgs',
'ContentKeyPolicyOptionArgs',
'ContentKeyPolicyPlayReadyConfigurationArgs',
'ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs',
'ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs',
'ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs',
'ContentKeyPolicyPlayReadyLicenseArgs',
'ContentKeyPolicyPlayReadyPlayRightArgs',
'ContentKeyPolicyRsaTokenKeyArgs',
'ContentKeyPolicySymmetricTokenKeyArgs',
'ContentKeyPolicyTokenClaimArgs',
'ContentKeyPolicyTokenRestrictionArgs',
'ContentKeyPolicyUnknownConfigurationArgs',
'ContentKeyPolicyUnknownRestrictionArgs',
'ContentKeyPolicyWidevineConfigurationArgs',
'ContentKeyPolicyX509CertificateTokenKeyArgs',
'CopyAudioArgs',
'CopyVideoArgs',
'CrossSiteAccessPoliciesArgs',
'DefaultKeyArgs',
'DeinterlaceArgs',
'EnabledProtocolsArgs',
'EnvelopeEncryptionArgs',
'FaceDetectorPresetArgs',
'FilterTrackPropertyConditionArgs',
'FilterTrackSelectionArgs',
'FiltersArgs',
'FirstQualityArgs',
'H264LayerArgs',
'H264VideoArgs',
'HlsArgs',
'IPAccessControlArgs',
'IPRangeArgs',
'ImageArgs',
'ImageFormatArgs',
'JobInputAssetArgs',
'JobInputClipArgs',
'JobInputHttpArgs',
'JobInputsArgs',
'JobOutputAssetArgs',
'JpgFormatArgs',
'JpgImageArgs',
'JpgLayerArgs',
'LiveEventEncodingArgs',
'LiveEventEndpointArgs',
'LiveEventInputArgs',
'LiveEventInputAccessControlArgs',
'LiveEventPreviewArgs',
'LiveEventPreviewAccessControlArgs',
'Mp4FormatArgs',
'MultiBitrateFormatArgs',
'NoEncryptionArgs',
'OutputFileArgs',
'PngFormatArgs',
'PngImageArgs',
'PngLayerArgs',
'PresentationTimeRangeArgs',
'RectangleArgs',
'StandardEncoderPresetArgs',
'StorageAccountArgs',
'StreamingEndpointAccessControlArgs',
'StreamingLocatorContentKeyArgs',
'StreamingPolicyContentKeyArgs',
'StreamingPolicyContentKeysArgs',
'StreamingPolicyFairPlayConfigurationArgs',
'StreamingPolicyPlayReadyConfigurationArgs',
'StreamingPolicyWidevineConfigurationArgs',
'TrackPropertyConditionArgs',
'TrackSelectionArgs',
'TransformOutputArgs',
'TransportStreamFormatArgs',
'VideoArgs',
'VideoAnalyzerPresetArgs',
'VideoOverlayArgs',
]
@pulumi.input_type
class AacAudioArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
bitrate: Optional[pulumi.Input[int]] = None,
channels: Optional[pulumi.Input[int]] = None,
label: Optional[pulumi.Input[str]] = None,
profile: Optional[pulumi.Input[Union[str, 'AacAudioProfile']]] = None,
sampling_rate: Optional[pulumi.Input[int]] = None):
"""
Describes Advanced Audio Codec (AAC) audio encoding settings.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.Audio'.
:param pulumi.Input[int] bitrate: The bitrate, in bits per second, of the output encoded audio.
:param pulumi.Input[int] channels: The number of channels in the audio.
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[Union[str, 'AacAudioProfile']] profile: The encoding profile to be used when encoding audio with AAC.
:param pulumi.Input[int] sampling_rate: The sampling rate to use for encoding in hertz.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Audio')
if bitrate is not None:
pulumi.set(__self__, "bitrate", bitrate)
if channels is not None:
pulumi.set(__self__, "channels", channels)
if label is not None:
pulumi.set(__self__, "label", label)
if profile is not None:
pulumi.set(__self__, "profile", profile)
if sampling_rate is not None:
pulumi.set(__self__, "sampling_rate", sampling_rate)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.Audio'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def bitrate(self) -> Optional[pulumi.Input[int]]:
"""
The bitrate, in bits per second, of the output encoded audio.
"""
return pulumi.get(self, "bitrate")
@bitrate.setter
def bitrate(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "bitrate", value)
@property
@pulumi.getter
def channels(self) -> Optional[pulumi.Input[int]]:
"""
The number of channels in the audio.
"""
return pulumi.get(self, "channels")
@channels.setter
def channels(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "channels", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def profile(self) -> Optional[pulumi.Input[Union[str, 'AacAudioProfile']]]:
"""
The encoding profile to be used when encoding audio with AAC.
"""
return pulumi.get(self, "profile")
@profile.setter
def profile(self, value: Optional[pulumi.Input[Union[str, 'AacAudioProfile']]]):
pulumi.set(self, "profile", value)
@property
@pulumi.getter(name="samplingRate")
def sampling_rate(self) -> Optional[pulumi.Input[int]]:
"""
The sampling rate to use for encoding in hertz.
"""
return pulumi.get(self, "sampling_rate")
@sampling_rate.setter
def sampling_rate(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "sampling_rate", value)
@pulumi.input_type
class AbsoluteClipTimeArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
time: pulumi.Input[str]):
"""
Specifies the clip time as an absolute time position in the media file. The absolute time can point to a different position depending on whether the media file starts from a timestamp of zero or not.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.AbsoluteClipTime'.
:param pulumi.Input[str] time: The time position on the timeline of the input media. It is usually specified as an ISO8601 period. e.g PT30S for 30 seconds.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.AbsoluteClipTime')
pulumi.set(__self__, "time", time)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.AbsoluteClipTime'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def time(self) -> pulumi.Input[str]:
"""
The time position on the timeline of the input media. It is usually specified as an ISO8601 period. e.g PT30S for 30 seconds.
"""
return pulumi.get(self, "time")
@time.setter
def time(self, value: pulumi.Input[str]):
pulumi.set(self, "time", value)
@pulumi.input_type
class AkamaiAccessControlArgs:
def __init__(__self__, *,
akamai_signature_header_authentication_key_list: Optional[pulumi.Input[Sequence[pulumi.Input['AkamaiSignatureHeaderAuthenticationKeyArgs']]]] = None):
"""
Akamai access control
:param pulumi.Input[Sequence[pulumi.Input['AkamaiSignatureHeaderAuthenticationKeyArgs']]] akamai_signature_header_authentication_key_list: authentication key list
"""
if akamai_signature_header_authentication_key_list is not None:
pulumi.set(__self__, "akamai_signature_header_authentication_key_list", akamai_signature_header_authentication_key_list)
@property
@pulumi.getter(name="akamaiSignatureHeaderAuthenticationKeyList")
def akamai_signature_header_authentication_key_list(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AkamaiSignatureHeaderAuthenticationKeyArgs']]]]:
"""
authentication key list
"""
return pulumi.get(self, "akamai_signature_header_authentication_key_list")
@akamai_signature_header_authentication_key_list.setter
def akamai_signature_header_authentication_key_list(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AkamaiSignatureHeaderAuthenticationKeyArgs']]]]):
pulumi.set(self, "akamai_signature_header_authentication_key_list", value)
@pulumi.input_type
class AkamaiSignatureHeaderAuthenticationKeyArgs:
def __init__(__self__, *,
base64_key: Optional[pulumi.Input[str]] = None,
expiration: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None):
"""
Akamai Signature Header authentication key.
:param pulumi.Input[str] base64_key: authentication key
:param pulumi.Input[str] expiration: The expiration time of the authentication key.
:param pulumi.Input[str] identifier: identifier of the key
"""
if base64_key is not None:
pulumi.set(__self__, "base64_key", base64_key)
if expiration is not None:
pulumi.set(__self__, "expiration", expiration)
if identifier is not None:
pulumi.set(__self__, "identifier", identifier)
@property
@pulumi.getter(name="base64Key")
def base64_key(self) -> Optional[pulumi.Input[str]]:
"""
authentication key
"""
return pulumi.get(self, "base64_key")
@base64_key.setter
def base64_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "base64_key", value)
@property
@pulumi.getter
def expiration(self) -> Optional[pulumi.Input[str]]:
"""
The expiration time of the authentication key.
"""
return pulumi.get(self, "expiration")
@expiration.setter
def expiration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expiration", value)
@property
@pulumi.getter
def identifier(self) -> Optional[pulumi.Input[str]]:
"""
identifier of the key
"""
return pulumi.get(self, "identifier")
@identifier.setter
def identifier(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "identifier", value)
@pulumi.input_type
class AudioArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
bitrate: Optional[pulumi.Input[int]] = None,
channels: Optional[pulumi.Input[int]] = None,
label: Optional[pulumi.Input[str]] = None,
sampling_rate: Optional[pulumi.Input[int]] = None):
"""
Defines the common properties for all audio codecs.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.Audio'.
:param pulumi.Input[int] bitrate: The bitrate, in bits per second, of the output encoded audio.
:param pulumi.Input[int] channels: The number of channels in the audio.
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[int] sampling_rate: The sampling rate to use for encoding in hertz.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Audio')
if bitrate is not None:
pulumi.set(__self__, "bitrate", bitrate)
if channels is not None:
pulumi.set(__self__, "channels", channels)
if label is not None:
pulumi.set(__self__, "label", label)
if sampling_rate is not None:
pulumi.set(__self__, "sampling_rate", sampling_rate)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.Audio'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def bitrate(self) -> Optional[pulumi.Input[int]]:
"""
The bitrate, in bits per second, of the output encoded audio.
"""
return pulumi.get(self, "bitrate")
@bitrate.setter
def bitrate(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "bitrate", value)
@property
@pulumi.getter
def channels(self) -> Optional[pulumi.Input[int]]:
"""
The number of channels in the audio.
"""
return pulumi.get(self, "channels")
@channels.setter
def channels(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "channels", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter(name="samplingRate")
def sampling_rate(self) -> Optional[pulumi.Input[int]]:
"""
The sampling rate to use for encoding in hertz.
"""
return pulumi.get(self, "sampling_rate")
@sampling_rate.setter
def sampling_rate(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "sampling_rate", value)
@pulumi.input_type
class AudioAnalyzerPresetArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
audio_language: Optional[pulumi.Input[str]] = None,
experimental_options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The Audio Analyzer preset applies a pre-defined set of AI-based analysis operations, including speech transcription. Currently, the preset supports processing of content with a single audio track.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.AudioAnalyzerPreset'.
:param pulumi.Input[str] audio_language: The language for the audio payload in the input using the BCP-47 format of 'language tag-region' (e.g: 'en-US'). If you know the language of your content, it is recommended that you specify it. If the language isn't specified or set to null, automatic language detection will choose the first language detected and process with the selected language for the duration of the file. It does not currently support dynamically switching between languages after the first language is detected. The automatic detection works best with audio recordings with clearly discernable speech. If automatic detection fails to find the language, transcription would fallback to 'en-US'." The list of supported languages is available here: https://go.microsoft.com/fwlink/?linkid=2109463
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] experimental_options: Dictionary containing key value pairs for parameters not exposed in the preset itself
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.AudioAnalyzerPreset')
if audio_language is not None:
pulumi.set(__self__, "audio_language", audio_language)
if experimental_options is not None:
pulumi.set(__self__, "experimental_options", experimental_options)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.AudioAnalyzerPreset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="audioLanguage")
def audio_language(self) -> Optional[pulumi.Input[str]]:
"""
The language for the audio payload in the input using the BCP-47 format of 'language tag-region' (e.g: 'en-US'). If you know the language of your content, it is recommended that you specify it. If the language isn't specified or set to null, automatic language detection will choose the first language detected and process with the selected language for the duration of the file. It does not currently support dynamically switching between languages after the first language is detected. The automatic detection works best with audio recordings with clearly discernable speech. If automatic detection fails to find the language, transcription would fallback to 'en-US'." The list of supported languages is available here: https://go.microsoft.com/fwlink/?linkid=2109463
"""
return pulumi.get(self, "audio_language")
@audio_language.setter
def audio_language(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "audio_language", value)
@property
@pulumi.getter(name="experimentalOptions")
def experimental_options(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Dictionary containing key value pairs for parameters not exposed in the preset itself
"""
return pulumi.get(self, "experimental_options")
@experimental_options.setter
def experimental_options(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "experimental_options", value)
@pulumi.input_type
class AudioOverlayArgs:
def __init__(__self__, *,
input_label: pulumi.Input[str],
odata_type: pulumi.Input[str],
audio_gain_level: Optional[pulumi.Input[float]] = None,
end: Optional[pulumi.Input[str]] = None,
fade_in_duration: Optional[pulumi.Input[str]] = None,
fade_out_duration: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None):
"""
Describes the properties of an audio overlay.
:param pulumi.Input[str] input_label: The label of the job input which is to be used as an overlay. The Input must specify exactly one file. You can specify an image file in JPG or PNG formats, or an audio file (such as a WAV, MP3, WMA or M4A file), or a video file. See https://aka.ms/mesformats for the complete list of supported audio and video file formats.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.AudioOverlay'.
:param pulumi.Input[float] audio_gain_level: The gain level of audio in the overlay. The value should be in the range [0, 1.0]. The default is 1.0.
:param pulumi.Input[str] end: The position in the input video at which the overlay ends. The value should be in ISO 8601 duration format. For example, PT30S to end the overlay at 30 seconds in to the input video. If not specified the overlay will be applied until the end of the input video if inputLoop is true. Else, if inputLoop is false, then overlay will last as long as the duration of the overlay media.
:param pulumi.Input[str] fade_in_duration: The duration over which the overlay fades in onto the input video. The value should be in ISO 8601 duration format. If not specified the default behavior is to have no fade in (same as PT0S).
:param pulumi.Input[str] fade_out_duration: The duration over which the overlay fades out of the input video. The value should be in ISO 8601 duration format. If not specified the default behavior is to have no fade out (same as PT0S).
:param pulumi.Input[str] start: The start position, with reference to the input video, at which the overlay starts. The value should be in ISO 8601 format. For example, PT05S to start the overlay at 5 seconds in to the input video. If not specified the overlay starts from the beginning of the input video.
"""
pulumi.set(__self__, "input_label", input_label)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.AudioOverlay')
if audio_gain_level is not None:
pulumi.set(__self__, "audio_gain_level", audio_gain_level)
if end is not None:
pulumi.set(__self__, "end", end)
if fade_in_duration is not None:
pulumi.set(__self__, "fade_in_duration", fade_in_duration)
if fade_out_duration is not None:
pulumi.set(__self__, "fade_out_duration", fade_out_duration)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter(name="inputLabel")
def input_label(self) -> pulumi.Input[str]:
"""
The label of the job input which is to be used as an overlay. The Input must specify exactly one file. You can specify an image file in JPG or PNG formats, or an audio file (such as a WAV, MP3, WMA or M4A file), or a video file. See https://aka.ms/mesformats for the complete list of supported audio and video file formats.
"""
return pulumi.get(self, "input_label")
@input_label.setter
def input_label(self, value: pulumi.Input[str]):
pulumi.set(self, "input_label", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.AudioOverlay'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="audioGainLevel")
def audio_gain_level(self) -> Optional[pulumi.Input[float]]:
"""
The gain level of audio in the overlay. The value should be in the range [0, 1.0]. The default is 1.0.
"""
return pulumi.get(self, "audio_gain_level")
@audio_gain_level.setter
def audio_gain_level(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "audio_gain_level", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input[str]]:
"""
The position in the input video at which the overlay ends. The value should be in ISO 8601 duration format. For example, PT30S to end the overlay at 30 seconds in to the input video. If not specified the overlay will be applied until the end of the input video if inputLoop is true. Else, if inputLoop is false, then overlay will last as long as the duration of the overlay media.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "end", value)
@property
@pulumi.getter(name="fadeInDuration")
def fade_in_duration(self) -> Optional[pulumi.Input[str]]:
"""
The duration over which the overlay fades in onto the input video. The value should be in ISO 8601 duration format. If not specified the default behavior is to have no fade in (same as PT0S).
"""
return pulumi.get(self, "fade_in_duration")
@fade_in_duration.setter
def fade_in_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fade_in_duration", value)
@property
@pulumi.getter(name="fadeOutDuration")
def fade_out_duration(self) -> Optional[pulumi.Input[str]]:
"""
The duration over which the overlay fades out of the input video. The value should be in ISO 8601 duration format. If not specified the default behavior is to have no fade out (same as PT0S).
"""
return pulumi.get(self, "fade_out_duration")
@fade_out_duration.setter
def fade_out_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fade_out_duration", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input[str]]:
"""
The start position, with reference to the input video, at which the overlay starts. The value should be in ISO 8601 format. For example, PT05S to start the overlay at 5 seconds in to the input video. If not specified the overlay starts from the beginning of the input video.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "start", value)
@pulumi.input_type
class BuiltInStandardEncoderPresetArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
preset_name: pulumi.Input[Union[str, 'EncoderNamedPreset']]):
"""
Describes a built-in preset for encoding the input video with the Standard Encoder.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.BuiltInStandardEncoderPreset'.
:param pulumi.Input[Union[str, 'EncoderNamedPreset']] preset_name: The built-in preset to be used for encoding videos.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.BuiltInStandardEncoderPreset')
pulumi.set(__self__, "preset_name", preset_name)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.BuiltInStandardEncoderPreset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="presetName")
def preset_name(self) -> pulumi.Input[Union[str, 'EncoderNamedPreset']]:
"""
The built-in preset to be used for encoding videos.
"""
return pulumi.get(self, "preset_name")
@preset_name.setter
def preset_name(self, value: pulumi.Input[Union[str, 'EncoderNamedPreset']]):
pulumi.set(self, "preset_name", value)
@pulumi.input_type
class CbcsDrmConfigurationArgs:
def __init__(__self__, *,
fair_play: Optional[pulumi.Input['StreamingPolicyFairPlayConfigurationArgs']] = None,
play_ready: Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']] = None,
widevine: Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']] = None):
"""
Class to specify DRM configurations of CommonEncryptionCbcs scheme in Streaming Policy
:param pulumi.Input['StreamingPolicyFairPlayConfigurationArgs'] fair_play: FairPlay configurations
:param pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs'] play_ready: PlayReady configurations
:param pulumi.Input['StreamingPolicyWidevineConfigurationArgs'] widevine: Widevine configurations
"""
if fair_play is not None:
pulumi.set(__self__, "fair_play", fair_play)
if play_ready is not None:
pulumi.set(__self__, "play_ready", play_ready)
if widevine is not None:
pulumi.set(__self__, "widevine", widevine)
@property
@pulumi.getter(name="fairPlay")
def fair_play(self) -> Optional[pulumi.Input['StreamingPolicyFairPlayConfigurationArgs']]:
"""
FairPlay configurations
"""
return pulumi.get(self, "fair_play")
@fair_play.setter
def fair_play(self, value: Optional[pulumi.Input['StreamingPolicyFairPlayConfigurationArgs']]):
pulumi.set(self, "fair_play", value)
@property
@pulumi.getter(name="playReady")
def play_ready(self) -> Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']]:
"""
PlayReady configurations
"""
return pulumi.get(self, "play_ready")
@play_ready.setter
def play_ready(self, value: Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']]):
pulumi.set(self, "play_ready", value)
@property
@pulumi.getter
def widevine(self) -> Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']]:
"""
Widevine configurations
"""
return pulumi.get(self, "widevine")
@widevine.setter
def widevine(self, value: Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']]):
pulumi.set(self, "widevine", value)
@pulumi.input_type
class CencDrmConfigurationArgs:
def __init__(__self__, *,
play_ready: Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']] = None,
widevine: Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']] = None):
"""
Class to specify DRM configurations of CommonEncryptionCenc scheme in Streaming Policy
:param pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs'] play_ready: PlayReady configurations
:param pulumi.Input['StreamingPolicyWidevineConfigurationArgs'] widevine: Widevine configurations
"""
if play_ready is not None:
pulumi.set(__self__, "play_ready", play_ready)
if widevine is not None:
pulumi.set(__self__, "widevine", widevine)
@property
@pulumi.getter(name="playReady")
def play_ready(self) -> Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']]:
"""
PlayReady configurations
"""
return pulumi.get(self, "play_ready")
@play_ready.setter
def play_ready(self, value: Optional[pulumi.Input['StreamingPolicyPlayReadyConfigurationArgs']]):
pulumi.set(self, "play_ready", value)
@property
@pulumi.getter
def widevine(self) -> Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']]:
"""
Widevine configurations
"""
return pulumi.get(self, "widevine")
@widevine.setter
def widevine(self, value: Optional[pulumi.Input['StreamingPolicyWidevineConfigurationArgs']]):
pulumi.set(self, "widevine", value)
@pulumi.input_type
class CommonEncryptionCbcsArgs:
def __init__(__self__, *,
clear_tracks: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]] = None,
content_keys: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']] = None,
drm: Optional[pulumi.Input['CbcsDrmConfigurationArgs']] = None,
enabled_protocols: Optional[pulumi.Input['EnabledProtocolsArgs']] = None):
"""
Class for CommonEncryptionCbcs encryption scheme
:param pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]] clear_tracks: Representing which tracks should not be encrypted
:param pulumi.Input['StreamingPolicyContentKeysArgs'] content_keys: Representing default content key for each encryption scheme and separate content keys for specific tracks
:param pulumi.Input['CbcsDrmConfigurationArgs'] drm: Configuration of DRMs for current encryption scheme
:param pulumi.Input['EnabledProtocolsArgs'] enabled_protocols: Representing supported protocols
"""
if clear_tracks is not None:
pulumi.set(__self__, "clear_tracks", clear_tracks)
if content_keys is not None:
pulumi.set(__self__, "content_keys", content_keys)
if drm is not None:
pulumi.set(__self__, "drm", drm)
if enabled_protocols is not None:
pulumi.set(__self__, "enabled_protocols", enabled_protocols)
@property
@pulumi.getter(name="clearTracks")
def clear_tracks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]:
"""
Representing which tracks should not be encrypted
"""
return pulumi.get(self, "clear_tracks")
@clear_tracks.setter
def clear_tracks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]):
pulumi.set(self, "clear_tracks", value)
@property
@pulumi.getter(name="contentKeys")
def content_keys(self) -> Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]:
"""
Representing default content key for each encryption scheme and separate content keys for specific tracks
"""
return pulumi.get(self, "content_keys")
@content_keys.setter
def content_keys(self, value: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]):
pulumi.set(self, "content_keys", value)
@property
@pulumi.getter
def drm(self) -> Optional[pulumi.Input['CbcsDrmConfigurationArgs']]:
"""
Configuration of DRMs for current encryption scheme
"""
return pulumi.get(self, "drm")
@drm.setter
def drm(self, value: Optional[pulumi.Input['CbcsDrmConfigurationArgs']]):
pulumi.set(self, "drm", value)
@property
@pulumi.getter(name="enabledProtocols")
def enabled_protocols(self) -> Optional[pulumi.Input['EnabledProtocolsArgs']]:
"""
Representing supported protocols
"""
return pulumi.get(self, "enabled_protocols")
@enabled_protocols.setter
def enabled_protocols(self, value: Optional[pulumi.Input['EnabledProtocolsArgs']]):
pulumi.set(self, "enabled_protocols", value)
@pulumi.input_type
class CommonEncryptionCencArgs:
def __init__(__self__, *,
clear_tracks: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]] = None,
content_keys: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']] = None,
drm: Optional[pulumi.Input['CencDrmConfigurationArgs']] = None,
enabled_protocols: Optional[pulumi.Input['EnabledProtocolsArgs']] = None):
"""
Class for CommonEncryptionCenc encryption scheme
:param pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]] clear_tracks: Representing which tracks should not be encrypted
:param pulumi.Input['StreamingPolicyContentKeysArgs'] content_keys: Representing default content key for each encryption scheme and separate content keys for specific tracks
:param pulumi.Input['CencDrmConfigurationArgs'] drm: Configuration of DRMs for CommonEncryptionCenc encryption scheme
:param pulumi.Input['EnabledProtocolsArgs'] enabled_protocols: Representing supported protocols
"""
if clear_tracks is not None:
pulumi.set(__self__, "clear_tracks", clear_tracks)
if content_keys is not None:
pulumi.set(__self__, "content_keys", content_keys)
if drm is not None:
pulumi.set(__self__, "drm", drm)
if enabled_protocols is not None:
pulumi.set(__self__, "enabled_protocols", enabled_protocols)
@property
@pulumi.getter(name="clearTracks")
def clear_tracks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]:
"""
Representing which tracks should not be encrypted
"""
return pulumi.get(self, "clear_tracks")
@clear_tracks.setter
def clear_tracks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]):
pulumi.set(self, "clear_tracks", value)
@property
@pulumi.getter(name="contentKeys")
def content_keys(self) -> Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]:
"""
Representing default content key for each encryption scheme and separate content keys for specific tracks
"""
return pulumi.get(self, "content_keys")
@content_keys.setter
def content_keys(self, value: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]):
pulumi.set(self, "content_keys", value)
@property
@pulumi.getter
def drm(self) -> Optional[pulumi.Input['CencDrmConfigurationArgs']]:
"""
Configuration of DRMs for CommonEncryptionCenc encryption scheme
"""
return pulumi.get(self, "drm")
@drm.setter
def drm(self, value: Optional[pulumi.Input['CencDrmConfigurationArgs']]):
pulumi.set(self, "drm", value)
@property
@pulumi.getter(name="enabledProtocols")
def enabled_protocols(self) -> Optional[pulumi.Input['EnabledProtocolsArgs']]:
"""
Representing supported protocols
"""
return pulumi.get(self, "enabled_protocols")
@enabled_protocols.setter
def enabled_protocols(self, value: Optional[pulumi.Input['EnabledProtocolsArgs']]):
pulumi.set(self, "enabled_protocols", value)
@pulumi.input_type
class ContentKeyPolicyClearKeyConfigurationArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str]):
"""
Represents a configuration for non-DRM keys.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyClearKeyConfiguration'.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyClearKeyConfiguration')
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyClearKeyConfiguration'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyFairPlayConfigurationArgs:
def __init__(__self__, *,
ask: pulumi.Input[str],
fair_play_pfx: pulumi.Input[str],
fair_play_pfx_password: pulumi.Input[str],
odata_type: pulumi.Input[str],
rental_and_lease_key_type: pulumi.Input[Union[str, 'ContentKeyPolicyFairPlayRentalAndLeaseKeyType']],
rental_duration: pulumi.Input[float],
offline_rental_configuration: Optional[pulumi.Input['ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs']] = None):
"""
Specifies a configuration for FairPlay licenses.
:param pulumi.Input[str] ask: The key that must be used as FairPlay Application Secret key.
:param pulumi.Input[str] fair_play_pfx: The Base64 representation of the FairPlay certificate in PKCS 12 (pfx) format (including the private key).
:param pulumi.Input[str] fair_play_pfx_password: The password encrypting the FairPlay certificate in PKCS 12 (pfx) format.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyFairPlayConfiguration'.
:param pulumi.Input[Union[str, 'ContentKeyPolicyFairPlayRentalAndLeaseKeyType']] rental_and_lease_key_type: The rental and lease key type.
:param pulumi.Input[float] rental_duration: The rental duration. Must be greater than or equal to 0.
:param pulumi.Input['ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs'] offline_rental_configuration: Offline rental policy
"""
pulumi.set(__self__, "ask", ask)
pulumi.set(__self__, "fair_play_pfx", fair_play_pfx)
pulumi.set(__self__, "fair_play_pfx_password", fair_play_pfx_password)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyFairPlayConfiguration')
pulumi.set(__self__, "rental_and_lease_key_type", rental_and_lease_key_type)
pulumi.set(__self__, "rental_duration", rental_duration)
if offline_rental_configuration is not None:
pulumi.set(__self__, "offline_rental_configuration", offline_rental_configuration)
@property
@pulumi.getter
def ask(self) -> pulumi.Input[str]:
"""
The key that must be used as FairPlay Application Secret key.
"""
return pulumi.get(self, "ask")
@ask.setter
def ask(self, value: pulumi.Input[str]):
pulumi.set(self, "ask", value)
@property
@pulumi.getter(name="fairPlayPfx")
def fair_play_pfx(self) -> pulumi.Input[str]:
"""
The Base64 representation of the FairPlay certificate in PKCS 12 (pfx) format (including the private key).
"""
return pulumi.get(self, "fair_play_pfx")
@fair_play_pfx.setter
def fair_play_pfx(self, value: pulumi.Input[str]):
pulumi.set(self, "fair_play_pfx", value)
@property
@pulumi.getter(name="fairPlayPfxPassword")
def fair_play_pfx_password(self) -> pulumi.Input[str]:
"""
The password encrypting the FairPlay certificate in PKCS 12 (pfx) format.
"""
return pulumi.get(self, "fair_play_pfx_password")
@fair_play_pfx_password.setter
def fair_play_pfx_password(self, value: pulumi.Input[str]):
pulumi.set(self, "fair_play_pfx_password", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyFairPlayConfiguration'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="rentalAndLeaseKeyType")
def rental_and_lease_key_type(self) -> pulumi.Input[Union[str, 'ContentKeyPolicyFairPlayRentalAndLeaseKeyType']]:
"""
The rental and lease key type.
"""
return pulumi.get(self, "rental_and_lease_key_type")
@rental_and_lease_key_type.setter
def rental_and_lease_key_type(self, value: pulumi.Input[Union[str, 'ContentKeyPolicyFairPlayRentalAndLeaseKeyType']]):
pulumi.set(self, "rental_and_lease_key_type", value)
@property
@pulumi.getter(name="rentalDuration")
def rental_duration(self) -> pulumi.Input[float]:
"""
The rental duration. Must be greater than or equal to 0.
"""
return pulumi.get(self, "rental_duration")
@rental_duration.setter
def rental_duration(self, value: pulumi.Input[float]):
pulumi.set(self, "rental_duration", value)
@property
@pulumi.getter(name="offlineRentalConfiguration")
def offline_rental_configuration(self) -> Optional[pulumi.Input['ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs']]:
"""
Offline rental policy
"""
return pulumi.get(self, "offline_rental_configuration")
@offline_rental_configuration.setter
def offline_rental_configuration(self, value: Optional[pulumi.Input['ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs']]):
pulumi.set(self, "offline_rental_configuration", value)
@pulumi.input_type
class ContentKeyPolicyFairPlayOfflineRentalConfigurationArgs:
def __init__(__self__, *,
playback_duration_seconds: pulumi.Input[float],
storage_duration_seconds: pulumi.Input[float]):
"""
:param pulumi.Input[float] playback_duration_seconds: Playback duration
:param pulumi.Input[float] storage_duration_seconds: Storage duration
"""
pulumi.set(__self__, "playback_duration_seconds", playback_duration_seconds)
pulumi.set(__self__, "storage_duration_seconds", storage_duration_seconds)
@property
@pulumi.getter(name="playbackDurationSeconds")
def playback_duration_seconds(self) -> pulumi.Input[float]:
"""
Playback duration
"""
return pulumi.get(self, "playback_duration_seconds")
@playback_duration_seconds.setter
def playback_duration_seconds(self, value: pulumi.Input[float]):
pulumi.set(self, "playback_duration_seconds", value)
@property
@pulumi.getter(name="storageDurationSeconds")
def storage_duration_seconds(self) -> pulumi.Input[float]:
"""
Storage duration
"""
return pulumi.get(self, "storage_duration_seconds")
@storage_duration_seconds.setter
def storage_duration_seconds(self, value: pulumi.Input[float]):
pulumi.set(self, "storage_duration_seconds", value)
@pulumi.input_type
class ContentKeyPolicyOpenRestrictionArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str]):
"""
Represents an open restriction. The license or key will be delivered on every request.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyOpenRestriction'.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyOpenRestriction')
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyOpenRestriction'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyOptionArgs:
def __init__(__self__, *,
configuration: pulumi.Input[Union['ContentKeyPolicyClearKeyConfigurationArgs', 'ContentKeyPolicyFairPlayConfigurationArgs', 'ContentKeyPolicyPlayReadyConfigurationArgs', 'ContentKeyPolicyUnknownConfigurationArgs', 'ContentKeyPolicyWidevineConfigurationArgs']],
restriction: pulumi.Input[Union['ContentKeyPolicyOpenRestrictionArgs', 'ContentKeyPolicyTokenRestrictionArgs', 'ContentKeyPolicyUnknownRestrictionArgs']],
name: Optional[pulumi.Input[str]] = None):
"""
Represents a policy option.
:param pulumi.Input[Union['ContentKeyPolicyClearKeyConfigurationArgs', 'ContentKeyPolicyFairPlayConfigurationArgs', 'ContentKeyPolicyPlayReadyConfigurationArgs', 'ContentKeyPolicyUnknownConfigurationArgs', 'ContentKeyPolicyWidevineConfigurationArgs']] configuration: The key delivery configuration.
:param pulumi.Input[Union['ContentKeyPolicyOpenRestrictionArgs', 'ContentKeyPolicyTokenRestrictionArgs', 'ContentKeyPolicyUnknownRestrictionArgs']] restriction: The requirements that must be met to deliver keys with this configuration
:param pulumi.Input[str] name: The Policy Option description.
"""
pulumi.set(__self__, "configuration", configuration)
pulumi.set(__self__, "restriction", restriction)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def configuration(self) -> pulumi.Input[Union['ContentKeyPolicyClearKeyConfigurationArgs', 'ContentKeyPolicyFairPlayConfigurationArgs', 'ContentKeyPolicyPlayReadyConfigurationArgs', 'ContentKeyPolicyUnknownConfigurationArgs', 'ContentKeyPolicyWidevineConfigurationArgs']]:
"""
The key delivery configuration.
"""
return pulumi.get(self, "configuration")
@configuration.setter
def configuration(self, value: pulumi.Input[Union['ContentKeyPolicyClearKeyConfigurationArgs', 'ContentKeyPolicyFairPlayConfigurationArgs', 'ContentKeyPolicyPlayReadyConfigurationArgs', 'ContentKeyPolicyUnknownConfigurationArgs', 'ContentKeyPolicyWidevineConfigurationArgs']]):
pulumi.set(self, "configuration", value)
@property
@pulumi.getter
def restriction(self) -> pulumi.Input[Union['ContentKeyPolicyOpenRestrictionArgs', 'ContentKeyPolicyTokenRestrictionArgs', 'ContentKeyPolicyUnknownRestrictionArgs']]:
"""
The requirements that must be met to deliver keys with this configuration
"""
return pulumi.get(self, "restriction")
@restriction.setter
def restriction(self, value: pulumi.Input[Union['ContentKeyPolicyOpenRestrictionArgs', 'ContentKeyPolicyTokenRestrictionArgs', 'ContentKeyPolicyUnknownRestrictionArgs']]):
pulumi.set(self, "restriction", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The Policy Option description.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyConfigurationArgs:
def __init__(__self__, *,
licenses: pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyPlayReadyLicenseArgs']]],
odata_type: pulumi.Input[str],
response_custom_data: Optional[pulumi.Input[str]] = None):
"""
Specifies a configuration for PlayReady licenses.
:param pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyPlayReadyLicenseArgs']]] licenses: The PlayReady licenses.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyConfiguration'.
:param pulumi.Input[str] response_custom_data: The custom response data.
"""
pulumi.set(__self__, "licenses", licenses)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyPlayReadyConfiguration')
if response_custom_data is not None:
pulumi.set(__self__, "response_custom_data", response_custom_data)
@property
@pulumi.getter
def licenses(self) -> pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyPlayReadyLicenseArgs']]]:
"""
The PlayReady licenses.
"""
return pulumi.get(self, "licenses")
@licenses.setter
def licenses(self, value: pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyPlayReadyLicenseArgs']]]):
pulumi.set(self, "licenses", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyConfiguration'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="responseCustomData")
def response_custom_data(self) -> Optional[pulumi.Input[str]]:
"""
The custom response data.
"""
return pulumi.get(self, "response_custom_data")
@response_custom_data.setter
def response_custom_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "response_custom_data", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str]):
"""
Specifies that the content key ID is in the PlayReady header.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeader'.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeader')
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeader'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs:
def __init__(__self__, *,
key_id: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Specifies that the content key ID is specified in the PlayReady configuration.
:param pulumi.Input[str] key_id: The content key ID.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifier'.
"""
pulumi.set(__self__, "key_id", key_id)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifier')
@property
@pulumi.getter(name="keyId")
def key_id(self) -> pulumi.Input[str]:
"""
The content key ID.
"""
return pulumi.get(self, "key_id")
@key_id.setter
def key_id(self, value: pulumi.Input[str]):
pulumi.set(self, "key_id", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifier'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs:
def __init__(__self__, *,
best_effort: pulumi.Input[bool],
configuration_data: pulumi.Input[int]):
"""
Configures the Explicit Analog Television Output Restriction control bits. For further details see the PlayReady Compliance Rules.
:param pulumi.Input[bool] best_effort: Indicates whether this restriction is enforced on a Best Effort basis.
:param pulumi.Input[int] configuration_data: Configures the restriction control bits. Must be between 0 and 3 inclusive.
"""
pulumi.set(__self__, "best_effort", best_effort)
pulumi.set(__self__, "configuration_data", configuration_data)
@property
@pulumi.getter(name="bestEffort")
def best_effort(self) -> pulumi.Input[bool]:
"""
Indicates whether this restriction is enforced on a Best Effort basis.
"""
return pulumi.get(self, "best_effort")
@best_effort.setter
def best_effort(self, value: pulumi.Input[bool]):
pulumi.set(self, "best_effort", value)
@property
@pulumi.getter(name="configurationData")
def configuration_data(self) -> pulumi.Input[int]:
"""
Configures the restriction control bits. Must be between 0 and 3 inclusive.
"""
return pulumi.get(self, "configuration_data")
@configuration_data.setter
def configuration_data(self, value: pulumi.Input[int]):
pulumi.set(self, "configuration_data", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyLicenseArgs:
def __init__(__self__, *,
allow_test_devices: pulumi.Input[bool],
content_key_location: pulumi.Input[Union['ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs', 'ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs']],
content_type: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyContentType']],
license_type: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyLicenseType']],
begin_date: Optional[pulumi.Input[str]] = None,
expiration_date: Optional[pulumi.Input[str]] = None,
grace_period: Optional[pulumi.Input[str]] = None,
play_right: Optional[pulumi.Input['ContentKeyPolicyPlayReadyPlayRightArgs']] = None,
relative_begin_date: Optional[pulumi.Input[str]] = None,
relative_expiration_date: Optional[pulumi.Input[str]] = None):
"""
The PlayReady license
:param pulumi.Input[bool] allow_test_devices: A flag indicating whether test devices can use the license.
:param pulumi.Input[Union['ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs', 'ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs']] content_key_location: The content key location.
:param pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyContentType']] content_type: The PlayReady content type.
:param pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyLicenseType']] license_type: The license type.
:param pulumi.Input[str] begin_date: The begin date of the license.
:param pulumi.Input[str] expiration_date: The expiration date of the license.
:param pulumi.Input[str] grace_period: The grace period of the license.
:param pulumi.Input['ContentKeyPolicyPlayReadyPlayRightArgs'] play_right: The license PlayRight.
:param pulumi.Input[str] relative_begin_date: The relative begin date of the license.
:param pulumi.Input[str] relative_expiration_date: The relative expiration date of the license.
"""
pulumi.set(__self__, "allow_test_devices", allow_test_devices)
pulumi.set(__self__, "content_key_location", content_key_location)
pulumi.set(__self__, "content_type", content_type)
pulumi.set(__self__, "license_type", license_type)
if begin_date is not None:
pulumi.set(__self__, "begin_date", begin_date)
if expiration_date is not None:
pulumi.set(__self__, "expiration_date", expiration_date)
if grace_period is not None:
pulumi.set(__self__, "grace_period", grace_period)
if play_right is not None:
pulumi.set(__self__, "play_right", play_right)
if relative_begin_date is not None:
pulumi.set(__self__, "relative_begin_date", relative_begin_date)
if relative_expiration_date is not None:
pulumi.set(__self__, "relative_expiration_date", relative_expiration_date)
@property
@pulumi.getter(name="allowTestDevices")
def allow_test_devices(self) -> pulumi.Input[bool]:
"""
A flag indicating whether test devices can use the license.
"""
return pulumi.get(self, "allow_test_devices")
@allow_test_devices.setter
def allow_test_devices(self, value: pulumi.Input[bool]):
pulumi.set(self, "allow_test_devices", value)
@property
@pulumi.getter(name="contentKeyLocation")
def content_key_location(self) -> pulumi.Input[Union['ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs', 'ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs']]:
"""
The content key location.
"""
return pulumi.get(self, "content_key_location")
@content_key_location.setter
def content_key_location(self, value: pulumi.Input[Union['ContentKeyPolicyPlayReadyContentEncryptionKeyFromHeaderArgs', 'ContentKeyPolicyPlayReadyContentEncryptionKeyFromKeyIdentifierArgs']]):
pulumi.set(self, "content_key_location", value)
@property
@pulumi.getter(name="contentType")
def content_type(self) -> pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyContentType']]:
"""
The PlayReady content type.
"""
return pulumi.get(self, "content_type")
@content_type.setter
def content_type(self, value: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyContentType']]):
pulumi.set(self, "content_type", value)
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyLicenseType']]:
"""
The license type.
"""
return pulumi.get(self, "license_type")
@license_type.setter
def license_type(self, value: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyLicenseType']]):
pulumi.set(self, "license_type", value)
@property
@pulumi.getter(name="beginDate")
def begin_date(self) -> Optional[pulumi.Input[str]]:
"""
The begin date of the license.
"""
return pulumi.get(self, "begin_date")
@begin_date.setter
def begin_date(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "begin_date", value)
@property
@pulumi.getter(name="expirationDate")
def expiration_date(self) -> Optional[pulumi.Input[str]]:
"""
The expiration date of the license.
"""
return pulumi.get(self, "expiration_date")
@expiration_date.setter
def expiration_date(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expiration_date", value)
@property
@pulumi.getter(name="gracePeriod")
def grace_period(self) -> Optional[pulumi.Input[str]]:
"""
The grace period of the license.
"""
return pulumi.get(self, "grace_period")
@grace_period.setter
def grace_period(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "grace_period", value)
@property
@pulumi.getter(name="playRight")
def play_right(self) -> Optional[pulumi.Input['ContentKeyPolicyPlayReadyPlayRightArgs']]:
"""
The license PlayRight
"""
return pulumi.get(self, "play_right")
@play_right.setter
def play_right(self, value: Optional[pulumi.Input['ContentKeyPolicyPlayReadyPlayRightArgs']]):
pulumi.set(self, "play_right", value)
@property
@pulumi.getter(name="relativeBeginDate")
def relative_begin_date(self) -> Optional[pulumi.Input[str]]:
"""
The relative begin date of the license.
"""
return pulumi.get(self, "relative_begin_date")
@relative_begin_date.setter
def relative_begin_date(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "relative_begin_date", value)
@property
@pulumi.getter(name="relativeExpirationDate")
def relative_expiration_date(self) -> Optional[pulumi.Input[str]]:
"""
The relative expiration date of the license.
"""
return pulumi.get(self, "relative_expiration_date")
@relative_expiration_date.setter
def relative_expiration_date(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "relative_expiration_date", value)
@pulumi.input_type
class ContentKeyPolicyPlayReadyPlayRightArgs:
def __init__(__self__, *,
allow_passing_video_content_to_unknown_output: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyUnknownOutputPassingOption']],
digital_video_only_content_restriction: pulumi.Input[bool],
image_constraint_for_analog_component_video_restriction: pulumi.Input[bool],
image_constraint_for_analog_computer_monitor_restriction: pulumi.Input[bool],
agc_and_color_stripe_restriction: Optional[pulumi.Input[int]] = None,
analog_video_opl: Optional[pulumi.Input[int]] = None,
compressed_digital_audio_opl: Optional[pulumi.Input[int]] = None,
compressed_digital_video_opl: Optional[pulumi.Input[int]] = None,
explicit_analog_television_output_restriction: Optional[pulumi.Input['ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs']] = None,
first_play_expiration: Optional[pulumi.Input[str]] = None,
scms_restriction: Optional[pulumi.Input[int]] = None,
uncompressed_digital_audio_opl: Optional[pulumi.Input[int]] = None,
uncompressed_digital_video_opl: Optional[pulumi.Input[int]] = None):
"""
Configures the Play Right in the PlayReady license.
:param pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyUnknownOutputPassingOption']] allow_passing_video_content_to_unknown_output: Configures Unknown output handling settings of the license.
:param pulumi.Input[bool] digital_video_only_content_restriction: Enables the Digital Video Only Content Restriction in the license.
:param pulumi.Input[bool] image_constraint_for_analog_component_video_restriction: Enables the Image Constraint For Analog Component Video Restriction in the license.
:param pulumi.Input[bool] image_constraint_for_analog_computer_monitor_restriction: Enables the Image Constraint For Analog Computer Monitor Restriction in the license.
:param pulumi.Input[int] agc_and_color_stripe_restriction: Configures Automatic Gain Control (AGC) and Color Stripe in the license. Must be between 0 and 3 inclusive.
:param pulumi.Input[int] analog_video_opl: Specifies the output protection level for analog video.
:param pulumi.Input[int] compressed_digital_audio_opl: Specifies the output protection level for compressed digital audio.
:param pulumi.Input[int] compressed_digital_video_opl: Specifies the output protection level for compressed digital video.
:param pulumi.Input['ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs'] explicit_analog_television_output_restriction: Configures the Explicit Analog Television Output Restriction in the license. Configuration data must be between 0 and 3 inclusive.
:param pulumi.Input[str] first_play_expiration: The amount of time that the license is valid after the license is first used to play content.
:param pulumi.Input[int] scms_restriction: Configures the Serial Copy Management System (SCMS) in the license. Must be between 0 and 3 inclusive.
:param pulumi.Input[int] uncompressed_digital_audio_opl: Specifies the output protection level for uncompressed digital audio.
:param pulumi.Input[int] uncompressed_digital_video_opl: Specifies the output protection level for uncompressed digital video.
"""
pulumi.set(__self__, "allow_passing_video_content_to_unknown_output", allow_passing_video_content_to_unknown_output)
pulumi.set(__self__, "digital_video_only_content_restriction", digital_video_only_content_restriction)
pulumi.set(__self__, "image_constraint_for_analog_component_video_restriction", image_constraint_for_analog_component_video_restriction)
pulumi.set(__self__, "image_constraint_for_analog_computer_monitor_restriction", image_constraint_for_analog_computer_monitor_restriction)
if agc_and_color_stripe_restriction is not None:
pulumi.set(__self__, "agc_and_color_stripe_restriction", agc_and_color_stripe_restriction)
if analog_video_opl is not None:
pulumi.set(__self__, "analog_video_opl", analog_video_opl)
if compressed_digital_audio_opl is not None:
pulumi.set(__self__, "compressed_digital_audio_opl", compressed_digital_audio_opl)
if compressed_digital_video_opl is not None:
pulumi.set(__self__, "compressed_digital_video_opl", compressed_digital_video_opl)
if explicit_analog_television_output_restriction is not None:
pulumi.set(__self__, "explicit_analog_television_output_restriction", explicit_analog_television_output_restriction)
if first_play_expiration is not None:
pulumi.set(__self__, "first_play_expiration", first_play_expiration)
if scms_restriction is not None:
pulumi.set(__self__, "scms_restriction", scms_restriction)
if uncompressed_digital_audio_opl is not None:
pulumi.set(__self__, "uncompressed_digital_audio_opl", uncompressed_digital_audio_opl)
if uncompressed_digital_video_opl is not None:
pulumi.set(__self__, "uncompressed_digital_video_opl", uncompressed_digital_video_opl)
@property
@pulumi.getter(name="allowPassingVideoContentToUnknownOutput")
def allow_passing_video_content_to_unknown_output(self) -> pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyUnknownOutputPassingOption']]:
"""
Configures Unknown output handling settings of the license.
"""
return pulumi.get(self, "allow_passing_video_content_to_unknown_output")
@allow_passing_video_content_to_unknown_output.setter
def allow_passing_video_content_to_unknown_output(self, value: pulumi.Input[Union[str, 'ContentKeyPolicyPlayReadyUnknownOutputPassingOption']]):
pulumi.set(self, "allow_passing_video_content_to_unknown_output", value)
@property
@pulumi.getter(name="digitalVideoOnlyContentRestriction")
def digital_video_only_content_restriction(self) -> pulumi.Input[bool]:
"""
Enables the Digital Video Only Content Restriction in the license.
"""
return pulumi.get(self, "digital_video_only_content_restriction")
@digital_video_only_content_restriction.setter
def digital_video_only_content_restriction(self, value: pulumi.Input[bool]):
pulumi.set(self, "digital_video_only_content_restriction", value)
@property
@pulumi.getter(name="imageConstraintForAnalogComponentVideoRestriction")
def image_constraint_for_analog_component_video_restriction(self) -> pulumi.Input[bool]:
"""
Enables the Image Constraint For Analog Component Video Restriction in the license.
"""
return pulumi.get(self, "image_constraint_for_analog_component_video_restriction")
@image_constraint_for_analog_component_video_restriction.setter
def image_constraint_for_analog_component_video_restriction(self, value: pulumi.Input[bool]):
pulumi.set(self, "image_constraint_for_analog_component_video_restriction", value)
@property
@pulumi.getter(name="imageConstraintForAnalogComputerMonitorRestriction")
def image_constraint_for_analog_computer_monitor_restriction(self) -> pulumi.Input[bool]:
"""
Enables the Image Constraint For Analog Computer Monitor Restriction in the license.
"""
return pulumi.get(self, "image_constraint_for_analog_computer_monitor_restriction")
@image_constraint_for_analog_computer_monitor_restriction.setter
def image_constraint_for_analog_computer_monitor_restriction(self, value: pulumi.Input[bool]):
pulumi.set(self, "image_constraint_for_analog_computer_monitor_restriction", value)
@property
@pulumi.getter(name="agcAndColorStripeRestriction")
def agc_and_color_stripe_restriction(self) -> Optional[pulumi.Input[int]]:
"""
Configures Automatic Gain Control (AGC) and Color Stripe in the license. Must be between 0 and 3 inclusive.
"""
return pulumi.get(self, "agc_and_color_stripe_restriction")
@agc_and_color_stripe_restriction.setter
def agc_and_color_stripe_restriction(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "agc_and_color_stripe_restriction", value)
@property
@pulumi.getter(name="analogVideoOpl")
def analog_video_opl(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the output protection level for analog video.
"""
return pulumi.get(self, "analog_video_opl")
@analog_video_opl.setter
def analog_video_opl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "analog_video_opl", value)
@property
@pulumi.getter(name="compressedDigitalAudioOpl")
def compressed_digital_audio_opl(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the output protection level for compressed digital audio.
"""
return pulumi.get(self, "compressed_digital_audio_opl")
@compressed_digital_audio_opl.setter
def compressed_digital_audio_opl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "compressed_digital_audio_opl", value)
@property
@pulumi.getter(name="compressedDigitalVideoOpl")
def compressed_digital_video_opl(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the output protection level for compressed digital video.
"""
return pulumi.get(self, "compressed_digital_video_opl")
@compressed_digital_video_opl.setter
def compressed_digital_video_opl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "compressed_digital_video_opl", value)
@property
@pulumi.getter(name="explicitAnalogTelevisionOutputRestriction")
def explicit_analog_television_output_restriction(self) -> Optional[pulumi.Input['ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs']]:
"""
Configures the Explicit Analog Television Output Restriction in the license. Configuration data must be between 0 and 3 inclusive.
"""
return pulumi.get(self, "explicit_analog_television_output_restriction")
@explicit_analog_television_output_restriction.setter
def explicit_analog_television_output_restriction(self, value: Optional[pulumi.Input['ContentKeyPolicyPlayReadyExplicitAnalogTelevisionRestrictionArgs']]):
pulumi.set(self, "explicit_analog_television_output_restriction", value)
@property
@pulumi.getter(name="firstPlayExpiration")
def first_play_expiration(self) -> Optional[pulumi.Input[str]]:
"""
The amount of time that the license is valid after the license is first used to play content.
"""
return pulumi.get(self, "first_play_expiration")
@first_play_expiration.setter
def first_play_expiration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "first_play_expiration", value)
@property
@pulumi.getter(name="scmsRestriction")
def scms_restriction(self) -> Optional[pulumi.Input[int]]:
"""
Configures the Serial Copy Management System (SCMS) in the license. Must be between 0 and 3 inclusive.
"""
return pulumi.get(self, "scms_restriction")
@scms_restriction.setter
def scms_restriction(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "scms_restriction", value)
@property
@pulumi.getter(name="uncompressedDigitalAudioOpl")
def uncompressed_digital_audio_opl(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the output protection level for uncompressed digital audio.
"""
return pulumi.get(self, "uncompressed_digital_audio_opl")
@uncompressed_digital_audio_opl.setter
def uncompressed_digital_audio_opl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "uncompressed_digital_audio_opl", value)
@property
@pulumi.getter(name="uncompressedDigitalVideoOpl")
def uncompressed_digital_video_opl(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the output protection level for uncompressed digital video.
"""
return pulumi.get(self, "uncompressed_digital_video_opl")
@uncompressed_digital_video_opl.setter
def uncompressed_digital_video_opl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "uncompressed_digital_video_opl", value)
@pulumi.input_type
class ContentKeyPolicyRsaTokenKeyArgs:
def __init__(__self__, *,
exponent: pulumi.Input[str],
modulus: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Specifies an RSA key for token validation.
:param pulumi.Input[str] exponent: The RSA Parameter exponent
:param pulumi.Input[str] modulus: The RSA Parameter modulus
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyRsaTokenKey'.
"""
pulumi.set(__self__, "exponent", exponent)
pulumi.set(__self__, "modulus", modulus)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyRsaTokenKey')
@property
@pulumi.getter
def exponent(self) -> pulumi.Input[str]:
"""
The RSA Parameter exponent
"""
return pulumi.get(self, "exponent")
@exponent.setter
def exponent(self, value: pulumi.Input[str]):
pulumi.set(self, "exponent", value)
@property
@pulumi.getter
def modulus(self) -> pulumi.Input[str]:
"""
The RSA Parameter modulus
"""
return pulumi.get(self, "modulus")
@modulus.setter
def modulus(self, value: pulumi.Input[str]):
pulumi.set(self, "modulus", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyRsaTokenKey'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicySymmetricTokenKeyArgs:
def __init__(__self__, *,
key_value: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Specifies a symmetric key for token validation.
:param pulumi.Input[str] key_value: The key value of the key
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicySymmetricTokenKey'.
"""
pulumi.set(__self__, "key_value", key_value)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicySymmetricTokenKey')
@property
@pulumi.getter(name="keyValue")
def key_value(self) -> pulumi.Input[str]:
"""
The key value of the key
"""
return pulumi.get(self, "key_value")
@key_value.setter
def key_value(self, value: pulumi.Input[str]):
pulumi.set(self, "key_value", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicySymmetricTokenKey'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyTokenClaimArgs:
def __init__(__self__, *,
claim_type: Optional[pulumi.Input[str]] = None,
claim_value: Optional[pulumi.Input[str]] = None):
"""
Represents a token claim.
:param pulumi.Input[str] claim_type: Token claim type.
:param pulumi.Input[str] claim_value: Token claim value.
"""
if claim_type is not None:
pulumi.set(__self__, "claim_type", claim_type)
if claim_value is not None:
pulumi.set(__self__, "claim_value", claim_value)
@property
@pulumi.getter(name="claimType")
def claim_type(self) -> Optional[pulumi.Input[str]]:
"""
Token claim type.
"""
return pulumi.get(self, "claim_type")
@claim_type.setter
def claim_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "claim_type", value)
@property
@pulumi.getter(name="claimValue")
def claim_value(self) -> Optional[pulumi.Input[str]]:
"""
Token claim value.
"""
return pulumi.get(self, "claim_value")
@claim_value.setter
def claim_value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "claim_value", value)
@pulumi.input_type
class ContentKeyPolicyTokenRestrictionArgs:
def __init__(__self__, *,
audience: pulumi.Input[str],
issuer: pulumi.Input[str],
odata_type: pulumi.Input[str],
primary_verification_key: pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']],
restriction_token_type: pulumi.Input[Union[str, 'ContentKeyPolicyRestrictionTokenType']],
alternate_verification_keys: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]]]] = None,
open_id_connect_discovery_document: Optional[pulumi.Input[str]] = None,
required_claims: Optional[pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyTokenClaimArgs']]]] = None):
"""
Represents a token restriction. The provided token must match these requirements for successful license or key delivery.
:param pulumi.Input[str] audience: The audience for the token.
:param pulumi.Input[str] issuer: The token issuer.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyTokenRestriction'.
:param pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']] primary_verification_key: The primary verification key.
:param pulumi.Input[Union[str, 'ContentKeyPolicyRestrictionTokenType']] restriction_token_type: The type of token.
:param pulumi.Input[Sequence[pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]]] alternate_verification_keys: A list of alternative verification keys.
:param pulumi.Input[str] open_id_connect_discovery_document: The OpenID Connect discovery document.
:param pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyTokenClaimArgs']]] required_claims: A list of required token claims.
"""
pulumi.set(__self__, "audience", audience)
pulumi.set(__self__, "issuer", issuer)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyTokenRestriction')
pulumi.set(__self__, "primary_verification_key", primary_verification_key)
pulumi.set(__self__, "restriction_token_type", restriction_token_type)
if alternate_verification_keys is not None:
pulumi.set(__self__, "alternate_verification_keys", alternate_verification_keys)
if open_id_connect_discovery_document is not None:
pulumi.set(__self__, "open_id_connect_discovery_document", open_id_connect_discovery_document)
if required_claims is not None:
pulumi.set(__self__, "required_claims", required_claims)
@property
@pulumi.getter
def audience(self) -> pulumi.Input[str]:
"""
The audience for the token.
"""
return pulumi.get(self, "audience")
@audience.setter
def audience(self, value: pulumi.Input[str]):
pulumi.set(self, "audience", value)
@property
@pulumi.getter
def issuer(self) -> pulumi.Input[str]:
"""
The token issuer.
"""
return pulumi.get(self, "issuer")
@issuer.setter
def issuer(self, value: pulumi.Input[str]):
pulumi.set(self, "issuer", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyTokenRestriction'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="primaryVerificationKey")
def primary_verification_key(self) -> pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]:
"""
The primary verification key.
"""
return pulumi.get(self, "primary_verification_key")
@primary_verification_key.setter
def primary_verification_key(self, value: pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]):
pulumi.set(self, "primary_verification_key", value)
@property
@pulumi.getter(name="restrictionTokenType")
def restriction_token_type(self) -> pulumi.Input[Union[str, 'ContentKeyPolicyRestrictionTokenType']]:
"""
The type of token.
"""
return pulumi.get(self, "restriction_token_type")
@restriction_token_type.setter
def restriction_token_type(self, value: pulumi.Input[Union[str, 'ContentKeyPolicyRestrictionTokenType']]):
pulumi.set(self, "restriction_token_type", value)
@property
@pulumi.getter(name="alternateVerificationKeys")
def alternate_verification_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]]]]:
"""
A list of alternative verification keys.
"""
return pulumi.get(self, "alternate_verification_keys")
@alternate_verification_keys.setter
def alternate_verification_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ContentKeyPolicyRsaTokenKeyArgs', 'ContentKeyPolicySymmetricTokenKeyArgs', 'ContentKeyPolicyX509CertificateTokenKeyArgs']]]]]):
pulumi.set(self, "alternate_verification_keys", value)
@property
@pulumi.getter(name="openIdConnectDiscoveryDocument")
def open_id_connect_discovery_document(self) -> Optional[pulumi.Input[str]]:
"""
The OpenID Connect discovery document.
"""
return pulumi.get(self, "open_id_connect_discovery_document")
@open_id_connect_discovery_document.setter
def open_id_connect_discovery_document(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "open_id_connect_discovery_document", value)
@property
@pulumi.getter(name="requiredClaims")
def required_claims(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyTokenClaimArgs']]]]:
"""
A list of required token claims.
"""
return pulumi.get(self, "required_claims")
@required_claims.setter
def required_claims(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ContentKeyPolicyTokenClaimArgs']]]]):
pulumi.set(self, "required_claims", value)
@pulumi.input_type
class ContentKeyPolicyUnknownConfigurationArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str]):
"""
Represents a ContentKeyPolicyConfiguration that is unavailable in the current API version.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyUnknownConfiguration'.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyUnknownConfiguration')
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyUnknownConfiguration'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyUnknownRestrictionArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str]):
"""
Represents a ContentKeyPolicyRestriction that is unavailable in the current API version.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyUnknownRestriction'.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyUnknownRestriction')
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyUnknownRestriction'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class ContentKeyPolicyWidevineConfigurationArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
widevine_template: pulumi.Input[str]):
"""
Specifies a configuration for Widevine licenses.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyWidevineConfiguration'.
:param pulumi.Input[str] widevine_template: The Widevine template.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyWidevineConfiguration')
pulumi.set(__self__, "widevine_template", widevine_template)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyWidevineConfiguration'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="widevineTemplate")
def widevine_template(self) -> pulumi.Input[str]:
"""
The Widevine template.
"""
return pulumi.get(self, "widevine_template")
@widevine_template.setter
def widevine_template(self, value: pulumi.Input[str]):
pulumi.set(self, "widevine_template", value)
@pulumi.input_type
class ContentKeyPolicyX509CertificateTokenKeyArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
raw_body: pulumi.Input[str]):
"""
Specifies a certificate for token validation.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyX509CertificateTokenKey'.
:param pulumi.Input[str] raw_body: The raw data field of a certificate in PKCS 12 format (X509Certificate2 in .NET)
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ContentKeyPolicyX509CertificateTokenKey')
pulumi.set(__self__, "raw_body", raw_body)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ContentKeyPolicyX509CertificateTokenKey'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="rawBody")
def raw_body(self) -> pulumi.Input[str]:
"""
The raw data field of a certificate in PKCS 12 format (X509Certificate2 in .NET)
"""
return pulumi.get(self, "raw_body")
@raw_body.setter
def raw_body(self, value: pulumi.Input[str]):
pulumi.set(self, "raw_body", value)
@pulumi.input_type
class CopyAudioArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
label: Optional[pulumi.Input[str]] = None):
"""
A codec flag, which tells the encoder to copy the input audio bitstream.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.CopyAudio'.
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.CopyAudio')
if label is not None:
pulumi.set(__self__, "label", label)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.CopyAudio'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@pulumi.input_type
class CopyVideoArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
label: Optional[pulumi.Input[str]] = None):
"""
A codec flag, which tells the encoder to copy the input video bitstream without re-encoding.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.CopyVideo'.
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.CopyVideo')
if label is not None:
pulumi.set(__self__, "label", label)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.CopyVideo'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@pulumi.input_type
class CrossSiteAccessPoliciesArgs:
def __init__(__self__, *,
client_access_policy: Optional[pulumi.Input[str]] = None,
cross_domain_policy: Optional[pulumi.Input[str]] = None):
"""
The client access policy.
:param pulumi.Input[str] client_access_policy: The content of clientaccesspolicy.xml used by Silverlight.
:param pulumi.Input[str] cross_domain_policy: The content of crossdomain.xml used by Silverlight.
"""
if client_access_policy is not None:
pulumi.set(__self__, "client_access_policy", client_access_policy)
if cross_domain_policy is not None:
pulumi.set(__self__, "cross_domain_policy", cross_domain_policy)
@property
@pulumi.getter(name="clientAccessPolicy")
def client_access_policy(self) -> Optional[pulumi.Input[str]]:
"""
The content of clientaccesspolicy.xml used by Silverlight.
"""
return pulumi.get(self, "client_access_policy")
@client_access_policy.setter
def client_access_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_access_policy", value)
@property
@pulumi.getter(name="crossDomainPolicy")
def cross_domain_policy(self) -> Optional[pulumi.Input[str]]:
"""
The content of crossdomain.xml used by Silverlight.
"""
return pulumi.get(self, "cross_domain_policy")
@cross_domain_policy.setter
def cross_domain_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cross_domain_policy", value)
@pulumi.input_type
class DefaultKeyArgs:
def __init__(__self__, *,
label: Optional[pulumi.Input[str]] = None,
policy_name: Optional[pulumi.Input[str]] = None):
"""
        Class to specify properties of the default content key for each encryption scheme
        :param pulumi.Input[str] label: Label that can be used to specify the Content Key when creating a Streaming Locator
        :param pulumi.Input[str] policy_name: Policy used by the Default Key
"""
if label is not None:
pulumi.set(__self__, "label", label)
if policy_name is not None:
pulumi.set(__self__, "policy_name", policy_name)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
        Label that can be used to specify the Content Key when creating a Streaming Locator
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter(name="policyName")
def policy_name(self) -> Optional[pulumi.Input[str]]:
"""
        Policy used by the Default Key
"""
return pulumi.get(self, "policy_name")
@policy_name.setter
def policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_name", value)
@pulumi.input_type
class DeinterlaceArgs:
def __init__(__self__, *,
mode: Optional[pulumi.Input[Union[str, 'DeinterlaceMode']]] = None,
parity: Optional[pulumi.Input[Union[str, 'DeinterlaceParity']]] = None):
"""
Describes the de-interlacing settings.
:param pulumi.Input[Union[str, 'DeinterlaceMode']] mode: The deinterlacing mode. Defaults to AutoPixelAdaptive.
:param pulumi.Input[Union[str, 'DeinterlaceParity']] parity: The field parity for de-interlacing, defaults to Auto.
"""
if mode is not None:
pulumi.set(__self__, "mode", mode)
if parity is not None:
pulumi.set(__self__, "parity", parity)
@property
@pulumi.getter
def mode(self) -> Optional[pulumi.Input[Union[str, 'DeinterlaceMode']]]:
"""
The deinterlacing mode. Defaults to AutoPixelAdaptive.
"""
return pulumi.get(self, "mode")
@mode.setter
def mode(self, value: Optional[pulumi.Input[Union[str, 'DeinterlaceMode']]]):
pulumi.set(self, "mode", value)
@property
@pulumi.getter
def parity(self) -> Optional[pulumi.Input[Union[str, 'DeinterlaceParity']]]:
"""
The field parity for de-interlacing, defaults to Auto.
"""
return pulumi.get(self, "parity")
@parity.setter
def parity(self, value: Optional[pulumi.Input[Union[str, 'DeinterlaceParity']]]):
pulumi.set(self, "parity", value)
@pulumi.input_type
class EnabledProtocolsArgs:
def __init__(__self__, *,
dash: pulumi.Input[bool],
download: pulumi.Input[bool],
hls: pulumi.Input[bool],
smooth_streaming: pulumi.Input[bool]):
"""
Class to specify which protocols are enabled
:param pulumi.Input[bool] dash: Enable DASH protocol or not
:param pulumi.Input[bool] download: Enable Download protocol or not
:param pulumi.Input[bool] hls: Enable HLS protocol or not
:param pulumi.Input[bool] smooth_streaming: Enable SmoothStreaming protocol or not
"""
pulumi.set(__self__, "dash", dash)
pulumi.set(__self__, "download", download)
pulumi.set(__self__, "hls", hls)
pulumi.set(__self__, "smooth_streaming", smooth_streaming)
@property
@pulumi.getter
def dash(self) -> pulumi.Input[bool]:
"""
Enable DASH protocol or not
"""
return pulumi.get(self, "dash")
@dash.setter
def dash(self, value: pulumi.Input[bool]):
pulumi.set(self, "dash", value)
@property
@pulumi.getter
def download(self) -> pulumi.Input[bool]:
"""
Enable Download protocol or not
"""
return pulumi.get(self, "download")
@download.setter
def download(self, value: pulumi.Input[bool]):
pulumi.set(self, "download", value)
@property
@pulumi.getter
def hls(self) -> pulumi.Input[bool]:
"""
Enable HLS protocol or not
"""
return pulumi.get(self, "hls")
@hls.setter
def hls(self, value: pulumi.Input[bool]):
pulumi.set(self, "hls", value)
@property
@pulumi.getter(name="smoothStreaming")
def smooth_streaming(self) -> pulumi.Input[bool]:
"""
Enable SmoothStreaming protocol or not
"""
return pulumi.get(self, "smooth_streaming")
@smooth_streaming.setter
def smooth_streaming(self, value: pulumi.Input[bool]):
pulumi.set(self, "smooth_streaming", value)
@pulumi.input_type
class EnvelopeEncryptionArgs:
def __init__(__self__, *,
clear_tracks: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]] = None,
content_keys: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']] = None,
custom_key_acquisition_url_template: Optional[pulumi.Input[str]] = None,
enabled_protocols: Optional[pulumi.Input['EnabledProtocolsArgs']] = None):
"""
Class for EnvelopeEncryption encryption scheme
        :param pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]] clear_tracks: Represents which tracks should not be encrypted
        :param pulumi.Input['StreamingPolicyContentKeysArgs'] content_keys: Represents the default content key for each encryption scheme and separate content keys for specific tracks
        :param pulumi.Input[str] custom_key_acquisition_url_template: Template for the URL of the custom service delivering keys to end user players. Not required when using Azure Media Services for issuing keys. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
        :param pulumi.Input['EnabledProtocolsArgs'] enabled_protocols: Represents the supported protocols
"""
if clear_tracks is not None:
pulumi.set(__self__, "clear_tracks", clear_tracks)
if content_keys is not None:
pulumi.set(__self__, "content_keys", content_keys)
if custom_key_acquisition_url_template is not None:
pulumi.set(__self__, "custom_key_acquisition_url_template", custom_key_acquisition_url_template)
if enabled_protocols is not None:
pulumi.set(__self__, "enabled_protocols", enabled_protocols)
@property
@pulumi.getter(name="clearTracks")
def clear_tracks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]:
"""
        Represents which tracks should not be encrypted
"""
return pulumi.get(self, "clear_tracks")
@clear_tracks.setter
def clear_tracks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]):
pulumi.set(self, "clear_tracks", value)
@property
@pulumi.getter(name="contentKeys")
def content_keys(self) -> Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]:
"""
        Represents the default content key for each encryption scheme and separate content keys for specific tracks
"""
return pulumi.get(self, "content_keys")
@content_keys.setter
def content_keys(self, value: Optional[pulumi.Input['StreamingPolicyContentKeysArgs']]):
pulumi.set(self, "content_keys", value)
@property
@pulumi.getter(name="customKeyAcquisitionUrlTemplate")
def custom_key_acquisition_url_template(self) -> Optional[pulumi.Input[str]]:
"""
        Template for the URL of the custom service delivering keys to end user players. Not required when using Azure Media Services for issuing keys. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
return pulumi.get(self, "custom_key_acquisition_url_template")
@custom_key_acquisition_url_template.setter
def custom_key_acquisition_url_template(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_key_acquisition_url_template", value)
@property
@pulumi.getter(name="enabledProtocols")
def enabled_protocols(self) -> Optional[pulumi.Input['EnabledProtocolsArgs']]:
"""
        Represents the supported protocols
"""
return pulumi.get(self, "enabled_protocols")
@enabled_protocols.setter
def enabled_protocols(self, value: Optional[pulumi.Input['EnabledProtocolsArgs']]):
pulumi.set(self, "enabled_protocols", value)
@pulumi.input_type
class FaceDetectorPresetArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
experimental_options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
resolution: Optional[pulumi.Input[Union[str, 'AnalysisResolution']]] = None):
"""
Describes all the settings to be used when analyzing a video in order to detect all the faces present.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.FaceDetectorPreset'.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] experimental_options: Dictionary containing key value pairs for parameters not exposed in the preset itself
:param pulumi.Input[Union[str, 'AnalysisResolution']] resolution: Specifies the maximum resolution at which your video is analyzed. The default behavior is "SourceResolution," which will keep the input video at its original resolution when analyzed. Using "StandardDefinition" will resize input videos to standard definition while preserving the appropriate aspect ratio. It will only resize if the video is of higher resolution. For example, a 1920x1080 input would be scaled to 640x360 before processing. Switching to "StandardDefinition" will reduce the time it takes to process high resolution video. It may also reduce the cost of using this component (see https://azure.microsoft.com/en-us/pricing/details/media-services/#analytics for details). However, faces that end up being too small in the resized video may not be detected.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.FaceDetectorPreset')
if experimental_options is not None:
pulumi.set(__self__, "experimental_options", experimental_options)
if resolution is not None:
pulumi.set(__self__, "resolution", resolution)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.FaceDetectorPreset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="experimentalOptions")
def experimental_options(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Dictionary containing key value pairs for parameters not exposed in the preset itself
"""
return pulumi.get(self, "experimental_options")
@experimental_options.setter
def experimental_options(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "experimental_options", value)
@property
@pulumi.getter
def resolution(self) -> Optional[pulumi.Input[Union[str, 'AnalysisResolution']]]:
"""
Specifies the maximum resolution at which your video is analyzed. The default behavior is "SourceResolution," which will keep the input video at its original resolution when analyzed. Using "StandardDefinition" will resize input videos to standard definition while preserving the appropriate aspect ratio. It will only resize if the video is of higher resolution. For example, a 1920x1080 input would be scaled to 640x360 before processing. Switching to "StandardDefinition" will reduce the time it takes to process high resolution video. It may also reduce the cost of using this component (see https://azure.microsoft.com/en-us/pricing/details/media-services/#analytics for details). However, faces that end up being too small in the resized video may not be detected.
"""
return pulumi.get(self, "resolution")
@resolution.setter
def resolution(self, value: Optional[pulumi.Input[Union[str, 'AnalysisResolution']]]):
pulumi.set(self, "resolution", value)
@pulumi.input_type
class FilterTrackPropertyConditionArgs:
def __init__(__self__, *,
operation: pulumi.Input[Union[str, 'FilterTrackPropertyCompareOperation']],
property: pulumi.Input[Union[str, 'FilterTrackPropertyType']],
value: pulumi.Input[str]):
"""
The class to specify one track property condition.
:param pulumi.Input[Union[str, 'FilterTrackPropertyCompareOperation']] operation: The track property condition operation.
:param pulumi.Input[Union[str, 'FilterTrackPropertyType']] property: The track property type.
:param pulumi.Input[str] value: The track property value.
"""
pulumi.set(__self__, "operation", operation)
pulumi.set(__self__, "property", property)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def operation(self) -> pulumi.Input[Union[str, 'FilterTrackPropertyCompareOperation']]:
"""
The track property condition operation.
"""
return pulumi.get(self, "operation")
@operation.setter
def operation(self, value: pulumi.Input[Union[str, 'FilterTrackPropertyCompareOperation']]):
pulumi.set(self, "operation", value)
@property
@pulumi.getter
def value(self) -> pulumi.Input[str]:
"""
The track property value.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: pulumi.Input[str]):
pulumi.set(self, "value", value)
@property
@pulumi.getter
def property(self) -> pulumi.Input[Union[str, 'FilterTrackPropertyType']]:
"""
The track property type.
"""
return pulumi.get(self, "property")
@property.setter
def property(self, value: pulumi.Input[Union[str, 'FilterTrackPropertyType']]):
pulumi.set(self, "property", value)
@pulumi.input_type
class FilterTrackSelectionArgs:
def __init__(__self__, *,
track_selections: pulumi.Input[Sequence[pulumi.Input['FilterTrackPropertyConditionArgs']]]):
"""
        Represents a list of FilterTrackPropertyConditions used to select a track. The filters are combined using a logical AND operation.
:param pulumi.Input[Sequence[pulumi.Input['FilterTrackPropertyConditionArgs']]] track_selections: The track selections.
"""
pulumi.set(__self__, "track_selections", track_selections)
@property
@pulumi.getter(name="trackSelections")
def track_selections(self) -> pulumi.Input[Sequence[pulumi.Input['FilterTrackPropertyConditionArgs']]]:
"""
The track selections.
"""
return pulumi.get(self, "track_selections")
@track_selections.setter
def track_selections(self, value: pulumi.Input[Sequence[pulumi.Input['FilterTrackPropertyConditionArgs']]]):
pulumi.set(self, "track_selections", value)
@pulumi.input_type
class FiltersArgs:
def __init__(__self__, *,
crop: Optional[pulumi.Input['RectangleArgs']] = None,
deinterlace: Optional[pulumi.Input['DeinterlaceArgs']] = None,
overlays: Optional[pulumi.Input[Sequence[pulumi.Input[Union['AudioOverlayArgs', 'VideoOverlayArgs']]]]] = None,
rotation: Optional[pulumi.Input[Union[str, 'Rotation']]] = None):
"""
Describes all the filtering operations, such as de-interlacing, rotation etc. that are to be applied to the input media before encoding.
:param pulumi.Input['RectangleArgs'] crop: The parameters for the rectangular window with which to crop the input video.
:param pulumi.Input['DeinterlaceArgs'] deinterlace: The de-interlacing settings.
:param pulumi.Input[Sequence[pulumi.Input[Union['AudioOverlayArgs', 'VideoOverlayArgs']]]] overlays: The properties of overlays to be applied to the input video. These could be audio, image or video overlays.
        :param pulumi.Input[Union[str, 'Rotation']] rotation: The rotation, if any, to be applied to the input video, before it is encoded. Default is Auto.
"""
if crop is not None:
pulumi.set(__self__, "crop", crop)
if deinterlace is not None:
pulumi.set(__self__, "deinterlace", deinterlace)
if overlays is not None:
pulumi.set(__self__, "overlays", overlays)
if rotation is not None:
pulumi.set(__self__, "rotation", rotation)
@property
@pulumi.getter
def crop(self) -> Optional[pulumi.Input['RectangleArgs']]:
"""
The parameters for the rectangular window with which to crop the input video.
"""
return pulumi.get(self, "crop")
@crop.setter
def crop(self, value: Optional[pulumi.Input['RectangleArgs']]):
pulumi.set(self, "crop", value)
@property
@pulumi.getter
def deinterlace(self) -> Optional[pulumi.Input['DeinterlaceArgs']]:
"""
The de-interlacing settings.
"""
return pulumi.get(self, "deinterlace")
@deinterlace.setter
def deinterlace(self, value: Optional[pulumi.Input['DeinterlaceArgs']]):
pulumi.set(self, "deinterlace", value)
@property
@pulumi.getter
def overlays(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['AudioOverlayArgs', 'VideoOverlayArgs']]]]]:
"""
The properties of overlays to be applied to the input video. These could be audio, image or video overlays.
"""
return pulumi.get(self, "overlays")
@overlays.setter
def overlays(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['AudioOverlayArgs', 'VideoOverlayArgs']]]]]):
pulumi.set(self, "overlays", value)
@property
@pulumi.getter
def rotation(self) -> Optional[pulumi.Input[Union[str, 'Rotation']]]:
"""
        The rotation, if any, to be applied to the input video, before it is encoded. Default is Auto.
"""
return pulumi.get(self, "rotation")
@rotation.setter
def rotation(self, value: Optional[pulumi.Input[Union[str, 'Rotation']]]):
pulumi.set(self, "rotation", value)
@pulumi.input_type
class FirstQualityArgs:
def __init__(__self__, *,
bitrate: pulumi.Input[int]):
"""
Filter First Quality
:param pulumi.Input[int] bitrate: The first quality bitrate.
"""
pulumi.set(__self__, "bitrate", bitrate)
@property
@pulumi.getter
def bitrate(self) -> pulumi.Input[int]:
"""
The first quality bitrate.
"""
return pulumi.get(self, "bitrate")
@bitrate.setter
def bitrate(self, value: pulumi.Input[int]):
pulumi.set(self, "bitrate", value)
@pulumi.input_type
class H264LayerArgs:
def __init__(__self__, *,
bitrate: pulumi.Input[int],
odata_type: pulumi.Input[str],
adaptive_b_frame: Optional[pulumi.Input[bool]] = None,
b_frames: Optional[pulumi.Input[int]] = None,
buffer_window: Optional[pulumi.Input[str]] = None,
entropy_mode: Optional[pulumi.Input[Union[str, 'EntropyMode']]] = None,
frame_rate: Optional[pulumi.Input[str]] = None,
height: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
level: Optional[pulumi.Input[str]] = None,
max_bitrate: Optional[pulumi.Input[int]] = None,
profile: Optional[pulumi.Input[Union[str, 'H264VideoProfile']]] = None,
reference_frames: Optional[pulumi.Input[int]] = None,
slices: Optional[pulumi.Input[int]] = None,
width: Optional[pulumi.Input[str]] = None):
"""
Describes the settings to be used when encoding the input video into a desired output bitrate layer with the H.264 video codec.
:param pulumi.Input[int] bitrate: The average bitrate in bits per second at which to encode the input video when generating this layer. This is a required field.
        :param pulumi.Input[str] odata_type: The discriminator for derived types.
               Expected value is '#Microsoft.Media.H264Layer'.
:param pulumi.Input[bool] adaptive_b_frame: Whether or not adaptive B-frames are to be used when encoding this layer. If not specified, the encoder will turn it on whenever the video profile permits its use.
:param pulumi.Input[int] b_frames: The number of B-frames to be used when encoding this layer. If not specified, the encoder chooses an appropriate number based on the video profile and level.
:param pulumi.Input[str] buffer_window: The VBV buffer window length. The value should be in ISO 8601 format. The value should be in the range [0.1-100] seconds. The default is 5 seconds (for example, PT5S).
:param pulumi.Input[Union[str, 'EntropyMode']] entropy_mode: The entropy mode to be used for this layer. If not specified, the encoder chooses the mode that is appropriate for the profile and level.
:param pulumi.Input[str] frame_rate: The frame rate (in frames per second) at which to encode this layer. The value can be in the form of M/N where M and N are integers (For example, 30000/1001), or in the form of a number (For example, 30, or 29.97). The encoder enforces constraints on allowed frame rates based on the profile and level. If it is not specified, the encoder will use the same frame rate as the input video.
:param pulumi.Input[str] height: The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example 50% means the output video has half as many pixels in height as the input.
:param pulumi.Input[str] label: The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
:param pulumi.Input[str] level: We currently support Level up to 6.2. The value can be Auto, or a number that matches the H.264 profile. If not specified, the default is Auto, which lets the encoder choose the Level that is appropriate for this layer.
:param pulumi.Input[int] max_bitrate: The maximum bitrate (in bits per second), at which the VBV buffer should be assumed to refill. If not specified, defaults to the same value as bitrate.
:param pulumi.Input[Union[str, 'H264VideoProfile']] profile: We currently support Baseline, Main, High, High422, High444. Default is Auto.
:param pulumi.Input[int] reference_frames: The number of reference frames to be used when encoding this layer. If not specified, the encoder determines an appropriate number based on the encoder complexity setting.
        :param pulumi.Input[int] slices: The number of slices to be used when encoding this layer. If not specified, default is zero, which means that the encoder will use a single slice for each frame.
:param pulumi.Input[str] width: The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example 50% means the output video has half as many pixels in width as the input.
"""
pulumi.set(__self__, "bitrate", bitrate)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.VideoLayer')
if adaptive_b_frame is not None:
pulumi.set(__self__, "adaptive_b_frame", adaptive_b_frame)
if b_frames is not None:
pulumi.set(__self__, "b_frames", b_frames)
if buffer_window is not None:
pulumi.set(__self__, "buffer_window", buffer_window)
if entropy_mode is not None:
pulumi.set(__self__, "entropy_mode", entropy_mode)
if frame_rate is not None:
pulumi.set(__self__, "frame_rate", frame_rate)
if height is not None:
pulumi.set(__self__, "height", height)
if label is not None:
pulumi.set(__self__, "label", label)
if level is not None:
pulumi.set(__self__, "level", level)
if max_bitrate is not None:
pulumi.set(__self__, "max_bitrate", max_bitrate)
if profile is not None:
pulumi.set(__self__, "profile", profile)
if reference_frames is not None:
pulumi.set(__self__, "reference_frames", reference_frames)
if slices is not None:
pulumi.set(__self__, "slices", slices)
if width is not None:
pulumi.set(__self__, "width", width)
@property
@pulumi.getter
def bitrate(self) -> pulumi.Input[int]:
"""
The average bitrate in bits per second at which to encode the input video when generating this layer. This is a required field.
"""
return pulumi.get(self, "bitrate")
@bitrate.setter
def bitrate(self, value: pulumi.Input[int]):
pulumi.set(self, "bitrate", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
        Expected value is '#Microsoft.Media.H264Layer'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="adaptiveBFrame")
def adaptive_b_frame(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not adaptive B-frames are to be used when encoding this layer. If not specified, the encoder will turn it on whenever the video profile permits its use.
"""
return pulumi.get(self, "adaptive_b_frame")
@adaptive_b_frame.setter
def adaptive_b_frame(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "adaptive_b_frame", value)
@property
@pulumi.getter(name="bFrames")
def b_frames(self) -> Optional[pulumi.Input[int]]:
"""
The number of B-frames to be used when encoding this layer. If not specified, the encoder chooses an appropriate number based on the video profile and level.
"""
return pulumi.get(self, "b_frames")
@b_frames.setter
def b_frames(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "b_frames", value)
@property
@pulumi.getter(name="bufferWindow")
def buffer_window(self) -> Optional[pulumi.Input[str]]:
"""
The VBV buffer window length. The value should be in ISO 8601 format. The value should be in the range [0.1-100] seconds. The default is 5 seconds (for example, PT5S).
"""
return pulumi.get(self, "buffer_window")
@buffer_window.setter
def buffer_window(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "buffer_window", value)
@property
@pulumi.getter(name="entropyMode")
def entropy_mode(self) -> Optional[pulumi.Input[Union[str, 'EntropyMode']]]:
"""
The entropy mode to be used for this layer. If not specified, the encoder chooses the mode that is appropriate for the profile and level.
"""
return pulumi.get(self, "entropy_mode")
@entropy_mode.setter
def entropy_mode(self, value: Optional[pulumi.Input[Union[str, 'EntropyMode']]]):
pulumi.set(self, "entropy_mode", value)
@property
@pulumi.getter(name="frameRate")
def frame_rate(self) -> Optional[pulumi.Input[str]]:
"""
The frame rate (in frames per second) at which to encode this layer. The value can be in the form of M/N where M and N are integers (For example, 30000/1001), or in the form of a number (For example, 30, or 29.97). The encoder enforces constraints on allowed frame rates based on the profile and level. If it is not specified, the encoder will use the same frame rate as the input video.
"""
return pulumi.get(self, "frame_rate")
@frame_rate.setter
def frame_rate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "frame_rate", value)
@property
@pulumi.getter
def height(self) -> Optional[pulumi.Input[str]]:
"""
The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example 50% means the output video has half as many pixels in height as the input.
"""
return pulumi.get(self, "height")
@height.setter
def height(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "height", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def level(self) -> Optional[pulumi.Input[str]]:
"""
We currently support Level up to 6.2. The value can be Auto, or a number that matches the H.264 profile. If not specified, the default is Auto, which lets the encoder choose the Level that is appropriate for this layer.
"""
return pulumi.get(self, "level")
@level.setter
def level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "level", value)
@property
@pulumi.getter(name="maxBitrate")
def max_bitrate(self) -> Optional[pulumi.Input[int]]:
"""
The maximum bitrate (in bits per second), at which the VBV buffer should be assumed to refill. If not specified, defaults to the same value as bitrate.
"""
return pulumi.get(self, "max_bitrate")
@max_bitrate.setter
def max_bitrate(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_bitrate", value)
@property
@pulumi.getter
def profile(self) -> Optional[pulumi.Input[Union[str, 'H264VideoProfile']]]:
"""
We currently support Baseline, Main, High, High422, High444. Default is Auto.
"""
return pulumi.get(self, "profile")
@profile.setter
def profile(self, value: Optional[pulumi.Input[Union[str, 'H264VideoProfile']]]):
pulumi.set(self, "profile", value)
@property
@pulumi.getter(name="referenceFrames")
def reference_frames(self) -> Optional[pulumi.Input[int]]:
"""
The number of reference frames to be used when encoding this layer. If not specified, the encoder determines an appropriate number based on the encoder complexity setting.
"""
return pulumi.get(self, "reference_frames")
@reference_frames.setter
def reference_frames(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "reference_frames", value)
@property
@pulumi.getter
def slices(self) -> Optional[pulumi.Input[int]]:
"""
        The number of slices to be used when encoding this layer. If not specified, default is zero, which means that the encoder will use a single slice for each frame.
"""
return pulumi.get(self, "slices")
@slices.setter
def slices(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "slices", value)
@property
@pulumi.getter
def width(self) -> Optional[pulumi.Input[str]]:
"""
The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example 50% means the output video has half as many pixels in width as the input.
"""
return pulumi.get(self, "width")
@width.setter
def width(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "width", value)
@pulumi.input_type
class H264VideoArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
complexity: Optional[pulumi.Input[Union[str, 'H264Complexity']]] = None,
key_frame_interval: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
layers: Optional[pulumi.Input[Sequence[pulumi.Input['H264LayerArgs']]]] = None,
scene_change_detection: Optional[pulumi.Input[bool]] = None,
stretch_mode: Optional[pulumi.Input[Union[str, 'StretchMode']]] = None):
"""
Describes all the properties for encoding a video with the H.264 codec.
        :param pulumi.Input[str] odata_type: The discriminator for derived types.
               Expected value is '#Microsoft.Media.H264Video'.
:param pulumi.Input[Union[str, 'H264Complexity']] complexity: Tells the encoder how to choose its encoding settings. The default value is Balanced.
:param pulumi.Input[str] key_frame_interval: The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[Sequence[pulumi.Input['H264LayerArgs']]] layers: The collection of output H.264 layers to be produced by the encoder.
:param pulumi.Input[bool] scene_change_detection: Whether or not the encoder should insert key frames at scene changes. If not specified, the default is false. This flag should be set to true only when the encoder is being configured to produce a single output video.
        :param pulumi.Input[Union[str, 'StretchMode']] stretch_mode: The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Video')
if complexity is not None:
pulumi.set(__self__, "complexity", complexity)
if key_frame_interval is not None:
pulumi.set(__self__, "key_frame_interval", key_frame_interval)
if label is not None:
pulumi.set(__self__, "label", label)
if layers is not None:
pulumi.set(__self__, "layers", layers)
if scene_change_detection is not None:
pulumi.set(__self__, "scene_change_detection", scene_change_detection)
if stretch_mode is not None:
pulumi.set(__self__, "stretch_mode", stretch_mode)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
        Expected value is '#Microsoft.Media.H264Video'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def complexity(self) -> Optional[pulumi.Input[Union[str, 'H264Complexity']]]:
"""
Tells the encoder how to choose its encoding settings. The default value is Balanced.
"""
return pulumi.get(self, "complexity")
@complexity.setter
def complexity(self, value: Optional[pulumi.Input[Union[str, 'H264Complexity']]]):
pulumi.set(self, "complexity", value)
@property
@pulumi.getter(name="keyFrameInterval")
def key_frame_interval(self) -> Optional[pulumi.Input[str]]:
"""
The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
"""
return pulumi.get(self, "key_frame_interval")
@key_frame_interval.setter
def key_frame_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def layers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['H264LayerArgs']]]]:
"""
The collection of output H.264 layers to be produced by the encoder.
"""
return pulumi.get(self, "layers")
@layers.setter
def layers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['H264LayerArgs']]]]):
pulumi.set(self, "layers", value)
@property
@pulumi.getter(name="sceneChangeDetection")
def scene_change_detection(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not the encoder should insert key frames at scene changes. If not specified, the default is false. This flag should be set to true only when the encoder is being configured to produce a single output video.
"""
return pulumi.get(self, "scene_change_detection")
@scene_change_detection.setter
def scene_change_detection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "scene_change_detection", value)
@property
@pulumi.getter(name="stretchMode")
def stretch_mode(self) -> Optional[pulumi.Input[Union[str, 'StretchMode']]]:
"""
        The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
return pulumi.get(self, "stretch_mode")
@stretch_mode.setter
def stretch_mode(self, value: Optional[pulumi.Input[Union[str, 'StretchMode']]]):
pulumi.set(self, "stretch_mode", value)
@pulumi.input_type
class HlsArgs:
def __init__(__self__, *,
fragments_per_ts_segment: Optional[pulumi.Input[int]] = None):
"""
The HLS configuration.
        :param pulumi.Input[int] fragments_per_ts_segment: The number of fragments per HTTP Live Streaming (HLS) segment.
"""
if fragments_per_ts_segment is not None:
pulumi.set(__self__, "fragments_per_ts_segment", fragments_per_ts_segment)
@property
@pulumi.getter(name="fragmentsPerTsSegment")
def fragments_per_ts_segment(self) -> Optional[pulumi.Input[int]]:
"""
        The number of fragments per HTTP Live Streaming (HLS) segment.
"""
return pulumi.get(self, "fragments_per_ts_segment")
@fragments_per_ts_segment.setter
def fragments_per_ts_segment(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "fragments_per_ts_segment", value)
@pulumi.input_type
class IPAccessControlArgs:
def __init__(__self__, *,
allow: Optional[pulumi.Input[Sequence[pulumi.Input['IPRangeArgs']]]] = None):
"""
The IP access control.
:param pulumi.Input[Sequence[pulumi.Input['IPRangeArgs']]] allow: The IP allow list.
"""
if allow is not None:
pulumi.set(__self__, "allow", allow)
@property
@pulumi.getter
def allow(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IPRangeArgs']]]]:
"""
The IP allow list.
"""
return pulumi.get(self, "allow")
@allow.setter
def allow(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IPRangeArgs']]]]):
pulumi.set(self, "allow", value)
@pulumi.input_type
class IPRangeArgs:
def __init__(__self__, *,
address: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
subnet_prefix_length: Optional[pulumi.Input[int]] = None):
"""
The IP address range in the CIDR scheme.
:param pulumi.Input[str] address: The IP address.
:param pulumi.Input[str] name: The friendly name for the IP address range.
:param pulumi.Input[int] subnet_prefix_length: The subnet mask prefix length (see CIDR notation).
"""
if address is not None:
pulumi.set(__self__, "address", address)
if name is not None:
pulumi.set(__self__, "name", name)
if subnet_prefix_length is not None:
pulumi.set(__self__, "subnet_prefix_length", subnet_prefix_length)
@property
@pulumi.getter
def address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address.
"""
return pulumi.get(self, "address")
@address.setter
def address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The friendly name for the IP address range.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="subnetPrefixLength")
def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:
"""
The subnet mask prefix length (see CIDR notation).
"""
return pulumi.get(self, "subnet_prefix_length")
@subnet_prefix_length.setter
def subnet_prefix_length(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "subnet_prefix_length", value)
@pulumi.input_type
class ImageArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
start: pulumi.Input[str],
key_frame_interval: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
range: Optional[pulumi.Input[str]] = None,
step: Optional[pulumi.Input[str]] = None,
stretch_mode: Optional[pulumi.Input[Union[str, 'StretchMode']]] = None):
"""
        Describes the basic properties for generating thumbnails from the input video.
        :param pulumi.Input[str] odata_type: The discriminator for derived types.
               Expected value is '#Microsoft.Media.Image'.
        :param pulumi.Input[str] start: The position in the input video from where to start generating thumbnails. The value can be in absolute timestamp (ISO 8601, e.g., PT05S), or a frame count (For example, 10 for the 10th frame), or a relative value (For example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
        :param pulumi.Input[str] key_frame_interval: The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
        :param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
        :param pulumi.Input[str] range: The position in the input video at which to stop generating thumbnails. The value can be in absolute timestamp (ISO 8601, e.g., PT5M30S to stop at 5 minutes and 30 seconds), or a frame count (For example, 300 to stop at the 300th frame), or a relative value (For example, 100%).
        :param pulumi.Input[str] step: The intervals at which thumbnails are generated. The value can be in absolute timestamp (ISO 8601, e.g., PT05S for one image every 5 seconds), or a frame count (For example, 30 for every 30 frames), or a relative value (For example, 1%).
        :param pulumi.Input[Union[str, 'StretchMode']] stretch_mode: The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Video')
pulumi.set(__self__, "start", start)
if key_frame_interval is not None:
pulumi.set(__self__, "key_frame_interval", key_frame_interval)
if label is not None:
pulumi.set(__self__, "label", label)
if range is not None:
pulumi.set(__self__, "range", range)
if step is not None:
pulumi.set(__self__, "step", step)
if stretch_mode is not None:
pulumi.set(__self__, "stretch_mode", stretch_mode)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
        Expected value is '#Microsoft.Media.Image'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def start(self) -> pulumi.Input[str]:
"""
        The position in the input video from where to start generating thumbnails. The value can be in absolute timestamp (ISO 8601, e.g., PT05S), or a frame count (For example, 10 for the 10th frame), or a relative value (For example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: pulumi.Input[str]):
pulumi.set(self, "start", value)
@property
@pulumi.getter(name="keyFrameInterval")
def key_frame_interval(self) -> Optional[pulumi.Input[str]]:
"""
The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
"""
return pulumi.get(self, "key_frame_interval")
@key_frame_interval.setter
def key_frame_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def range(self) -> Optional[pulumi.Input[str]]:
"""
        The position in the input video at which to stop generating thumbnails. The value can be in absolute timestamp (ISO 8601, e.g., PT5M30S to stop at 5 minutes and 30 seconds), or a frame count (For example, 300 to stop at the 300th frame), or a relative value (For example, 100%).
"""
return pulumi.get(self, "range")
@range.setter
def range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "range", value)
@property
@pulumi.getter
def step(self) -> Optional[pulumi.Input[str]]:
"""
        The intervals at which thumbnails are generated. The value can be in absolute timestamp (ISO 8601, e.g., PT05S for one image every 5 seconds), or a frame count (For example, 30 for every 30 frames), or a relative value (For example, 1%).
"""
return pulumi.get(self, "step")
@step.setter
def step(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "step", value)
@property
@pulumi.getter(name="stretchMode")
def stretch_mode(self) -> Optional[pulumi.Input[Union[str, 'StretchMode']]]:
"""
        The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
return pulumi.get(self, "stretch_mode")
@stretch_mode.setter
def stretch_mode(self, value: Optional[pulumi.Input[Union[str, 'StretchMode']]]):
pulumi.set(self, "stretch_mode", value)
@pulumi.input_type
class ImageFormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Describes the properties for an output image file.
        :param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.ImageFormat'.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ImageFormat')
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
        The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.ImageFormat'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class JobInputAssetArgs:
def __init__(__self__, *,
asset_name: pulumi.Input[str],
odata_type: pulumi.Input[str],
end: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None,
files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
label: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None):
"""
Represents an Asset for input into a Job.
:param pulumi.Input[str] asset_name: The name of the input Asset.
        :param pulumi.Input[str] odata_type: The discriminator for derived types.
               Expected value is '#Microsoft.Media.JobInputAsset'.
:param pulumi.Input['AbsoluteClipTimeArgs'] end: Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
:param pulumi.Input[Sequence[pulumi.Input[str]]] files: List of files. Required for JobInputHttp. Maximum of 4000 characters each.
:param pulumi.Input[str] label: A label that is assigned to a JobInputClip, that is used to satisfy a reference used in the Transform. For example, a Transform can be authored so as to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
:param pulumi.Input['AbsoluteClipTimeArgs'] start: Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
pulumi.set(__self__, "asset_name", asset_name)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JobInputClip')
if end is not None:
pulumi.set(__self__, "end", end)
if files is not None:
pulumi.set(__self__, "files", files)
if label is not None:
pulumi.set(__self__, "label", label)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter(name="assetName")
def asset_name(self) -> pulumi.Input[str]:
"""
The name of the input Asset.
"""
return pulumi.get(self, "asset_name")
@asset_name.setter
def asset_name(self, value: pulumi.Input[str]):
pulumi.set(self, "asset_name", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
        Expected value is '#Microsoft.Media.JobInputAsset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "end", value)
@property
@pulumi.getter
def files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of files. Required for JobInputHttp. Maximum of 4000 characters each.
"""
return pulumi.get(self, "files")
@files.setter
def files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "files", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
A label that is assigned to a JobInputClip, that is used to satisfy a reference used in the Transform. For example, a Transform can be authored so as to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "start", value)
@pulumi.input_type
class JobInputClipArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
end: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None,
files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
label: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None):
"""
Represents input files for a Job.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JobInputClip'.
:param pulumi.Input['AbsoluteClipTimeArgs'] end: Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
:param pulumi.Input[Sequence[pulumi.Input[str]]] files: List of files. Required for JobInputHttp. Maximum of 4000 characters each.
:param pulumi.Input[str] label: A label that is assigned to a JobInputClip, that is used to satisfy a reference used in the Transform. For example, a Transform can be authored so as to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
:param pulumi.Input['AbsoluteClipTimeArgs'] start: Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JobInputClip')
if end is not None:
pulumi.set(__self__, "end", end)
if files is not None:
pulumi.set(__self__, "files", files)
if label is not None:
pulumi.set(__self__, "label", label)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JobInputClip'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "end", value)
@property
@pulumi.getter
def files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of files. Required for JobInputHttp. Maximum of 4000 characters each.
"""
return pulumi.get(self, "files")
@files.setter
def files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "files", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
A label that is assigned to a JobInputClip, that is used to satisfy a reference used in the Transform. For example, a Transform can be authored so as to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "start", value)
@pulumi.input_type
class JobInputHttpArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
base_uri: Optional[pulumi.Input[str]] = None,
end: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None,
files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
label: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input['AbsoluteClipTimeArgs']] = None):
"""
Represents HTTPS job input.
        :param pulumi.Input[str] odata_type: The discriminator for derived types.
               Expected value is '#Microsoft.Media.JobInputHttp'.
        :param pulumi.Input[str] base_uri: Base URI for HTTPS job input. It will be concatenated with provided file names. If no base URI is given, then the provided file list is assumed to be fully qualified URIs. Maximum length of 4000 characters.
:param pulumi.Input['AbsoluteClipTimeArgs'] end: Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
:param pulumi.Input[Sequence[pulumi.Input[str]]] files: List of files. Required for JobInputHttp. Maximum of 4000 characters each.
:param pulumi.Input[str] label: A label that is assigned to a JobInputClip and is used to satisfy a reference used in the Transform. For example, a Transform can be authored to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
:param pulumi.Input['AbsoluteClipTimeArgs'] start: Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JobInputClip')
if base_uri is not None:
pulumi.set(__self__, "base_uri", base_uri)
if end is not None:
pulumi.set(__self__, "end", end)
if files is not None:
pulumi.set(__self__, "files", files)
if label is not None:
pulumi.set(__self__, "label", label)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JobInputHttp'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="baseUri")
def base_uri(self) -> Optional[pulumi.Input[str]]:
"""
Base URI for HTTPS job input. It will be concatenated with the provided file names. If no base URI is given, the provided file list is assumed to contain fully qualified URIs. Maximum length of 4000 characters.
"""
return pulumi.get(self, "base_uri")
@base_uri.setter
def base_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "base_uri", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will end. Defaults to the end of the input media.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "end", value)
@property
@pulumi.getter
def files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of files. Required for JobInputHttp. Maximum of 4000 characters each.
"""
return pulumi.get(self, "files")
@files.setter
def files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "files", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
A label that is assigned to a JobInputClip and is used to satisfy a reference used in the Transform. For example, a Transform can be authored to take an image file with the label 'xyz' and apply it as an overlay onto the input video before it is encoded. When submitting a Job, exactly one of the JobInputs should be the image file, and it should have the label 'xyz'.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input['AbsoluteClipTimeArgs']]:
"""
Defines a point on the timeline of the input media at which processing will start. Defaults to the beginning of the input media.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input['AbsoluteClipTimeArgs']]):
pulumi.set(self, "start", value)
@pulumi.input_type
class JobInputsArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
inputs: Optional[pulumi.Input[Sequence[pulumi.Input[Union['JobInputAssetArgs', 'JobInputClipArgs', 'JobInputHttpArgs', 'JobInputsArgs']]]]] = None):
"""
Describes a list of inputs to a Job.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JobInputs'.
:param pulumi.Input[Sequence[pulumi.Input[Union['JobInputAssetArgs', 'JobInputClipArgs', 'JobInputHttpArgs', 'JobInputsArgs']]]] inputs: List of inputs to a Job.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JobInputs')
if inputs is not None:
pulumi.set(__self__, "inputs", inputs)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JobInputs'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def inputs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['JobInputAssetArgs', 'JobInputClipArgs', 'JobInputHttpArgs', 'JobInputsArgs']]]]]:
"""
List of inputs to a Job.
"""
return pulumi.get(self, "inputs")
@inputs.setter
def inputs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['JobInputAssetArgs', 'JobInputClipArgs', 'JobInputHttpArgs', 'JobInputsArgs']]]]]):
pulumi.set(self, "inputs", value)
@pulumi.input_type
class JobOutputAssetArgs:
def __init__(__self__, *,
asset_name: pulumi.Input[str],
odata_type: pulumi.Input[str],
label: Optional[pulumi.Input[str]] = None):
"""
Represents an Asset used as a JobOutput.
:param pulumi.Input[str] asset_name: The name of the output Asset.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JobOutputAsset'.
:param pulumi.Input[str] label: A label that is assigned to a JobOutput in order to help uniquely identify it. This is useful when your Transform has more than one TransformOutput, and therefore your Job has more than one JobOutput. In such cases, when you submit the Job, you will add two or more JobOutputs, in the same order as TransformOutputs in the Transform. Subsequently, when you retrieve the Job, either through events or on a GET request, you can use the label to easily identify the JobOutput. If a label is not provided, a default value of '{presetName}_{outputIndex}' will be used, where the preset name is the name of the preset in the corresponding TransformOutput and the output index is the relative index of this JobOutput within the Job. Note that this index is the same as the relative index of the corresponding TransformOutput within its Transform.
"""
pulumi.set(__self__, "asset_name", asset_name)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JobOutputAsset')
if label is not None:
pulumi.set(__self__, "label", label)
@property
@pulumi.getter(name="assetName")
def asset_name(self) -> pulumi.Input[str]:
"""
The name of the output Asset.
"""
return pulumi.get(self, "asset_name")
@asset_name.setter
def asset_name(self, value: pulumi.Input[str]):
pulumi.set(self, "asset_name", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JobOutputAsset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
A label that is assigned to a JobOutput in order to help uniquely identify it. This is useful when your Transform has more than one TransformOutput, and therefore your Job has more than one JobOutput. In such cases, when you submit the Job, you will add two or more JobOutputs, in the same order as TransformOutputs in the Transform. Subsequently, when you retrieve the Job, either through events or on a GET request, you can use the label to easily identify the JobOutput. If a label is not provided, a default value of '{presetName}_{outputIndex}' will be used, where the preset name is the name of the preset in the corresponding TransformOutput and the output index is the relative index of this JobOutput within the Job. Note that this index is the same as the relative index of the corresponding TransformOutput within its Transform.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@pulumi.input_type
class JpgFormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Describes the settings for producing JPEG thumbnails.
:param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgFormat'.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ImageFormat')
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgFormat'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class JpgImageArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
start: pulumi.Input[str],
key_frame_interval: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
layers: Optional[pulumi.Input[Sequence[pulumi.Input['JpgLayerArgs']]]] = None,
range: Optional[pulumi.Input[str]] = None,
step: Optional[pulumi.Input[str]] = None,
stretch_mode: Optional[pulumi.Input[Union[str, 'StretchMode']]] = None):
"""
Describes the properties for producing a series of JPEG images from the input video.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgImage'.
:param pulumi.Input[str] start: The position in the input video from where to start generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT05S), a frame count (for example, 10 for the 10th frame), or a relative value (for example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
:param pulumi.Input[str] key_frame_interval: The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[Sequence[pulumi.Input['JpgLayerArgs']]] layers: A collection of output JPEG image layers to be produced by the encoder.
:param pulumi.Input[str] range: The position in the input video at which to stop generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT5M30S to stop at 5 minutes and 30 seconds), a frame count (for example, 300 to stop at the 300th frame), or a relative value (for example, 100%).
:param pulumi.Input[str] step: The intervals at which thumbnails are generated. The value can be an absolute timestamp (ISO 8601, e.g. PT05S for one image every 5 seconds), a frame count (for example, 30 for every 30 frames), or a relative value (for example, 1%).
:param pulumi.Input[Union[str, 'StretchMode']] stretch_mode: The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Video')
pulumi.set(__self__, "start", start)
if key_frame_interval is not None:
pulumi.set(__self__, "key_frame_interval", key_frame_interval)
if label is not None:
pulumi.set(__self__, "label", label)
if layers is not None:
pulumi.set(__self__, "layers", layers)
if range is not None:
pulumi.set(__self__, "range", range)
if step is not None:
pulumi.set(__self__, "step", step)
if stretch_mode is not None:
pulumi.set(__self__, "stretch_mode", stretch_mode)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgImage'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def start(self) -> pulumi.Input[str]:
"""
The position in the input video from where to start generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT05S), a frame count (for example, 10 for the 10th frame), or a relative value (for example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: pulumi.Input[str]):
pulumi.set(self, "start", value)
@property
@pulumi.getter(name="keyFrameInterval")
def key_frame_interval(self) -> Optional[pulumi.Input[str]]:
"""
The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
"""
return pulumi.get(self, "key_frame_interval")
@key_frame_interval.setter
def key_frame_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def layers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['JpgLayerArgs']]]]:
"""
A collection of output JPEG image layers to be produced by the encoder.
"""
return pulumi.get(self, "layers")
@layers.setter
def layers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['JpgLayerArgs']]]]):
pulumi.set(self, "layers", value)
@property
@pulumi.getter
def range(self) -> Optional[pulumi.Input[str]]:
"""
The position in the input video at which to stop generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT5M30S to stop at 5 minutes and 30 seconds), a frame count (for example, 300 to stop at the 300th frame), or a relative value (for example, 100%).
"""
return pulumi.get(self, "range")
@range.setter
def range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "range", value)
@property
@pulumi.getter
def step(self) -> Optional[pulumi.Input[str]]:
"""
The intervals at which thumbnails are generated. The value can be an absolute timestamp (ISO 8601, e.g. PT05S for one image every 5 seconds), a frame count (for example, 30 for every 30 frames), or a relative value (for example, 1%).
"""
return pulumi.get(self, "step")
@step.setter
def step(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "step", value)
@property
@pulumi.getter(name="stretchMode")
def stretch_mode(self) -> Optional[pulumi.Input[Union[str, 'StretchMode']]]:
"""
The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
return pulumi.get(self, "stretch_mode")
@stretch_mode.setter
def stretch_mode(self, value: Optional[pulumi.Input[Union[str, 'StretchMode']]]):
pulumi.set(self, "stretch_mode", value)
@pulumi.input_type
class JpgLayerArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
height: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
quality: Optional[pulumi.Input[int]] = None,
width: Optional[pulumi.Input[str]] = None):
"""
Describes the settings to produce a JPEG image from the input video.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgLayer'.
:param pulumi.Input[str] height: The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in height as the input.
:param pulumi.Input[str] label: The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
:param pulumi.Input[int] quality: The compression quality of the JPEG output. Range is 0 to 100, and the default is 70.
:param pulumi.Input[str] width: The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in width as the input.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.JpgLayer')
if height is not None:
pulumi.set(__self__, "height", height)
if label is not None:
pulumi.set(__self__, "label", label)
if quality is not None:
pulumi.set(__self__, "quality", quality)
if width is not None:
pulumi.set(__self__, "width", width)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.JpgLayer'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def height(self) -> Optional[pulumi.Input[str]]:
"""
The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in height as the input.
"""
return pulumi.get(self, "height")
@height.setter
def height(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "height", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def quality(self) -> Optional[pulumi.Input[int]]:
"""
The compression quality of the JPEG output. Range is 0 to 100, and the default is 70.
"""
return pulumi.get(self, "quality")
@quality.setter
def quality(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "quality", value)
@property
@pulumi.getter
def width(self) -> Optional[pulumi.Input[str]]:
"""
The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in width as the input.
"""
return pulumi.get(self, "width")
@width.setter
def width(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "width", value)
@pulumi.input_type
class LiveEventEncodingArgs:
def __init__(__self__, *,
encoding_type: Optional[pulumi.Input[Union[str, 'LiveEventEncodingType']]] = None,
preset_name: Optional[pulumi.Input[str]] = None):
"""
The Live Event encoding.
:param pulumi.Input[Union[str, 'LiveEventEncodingType']] encoding_type: The encoding type for Live Event. This value is specified at creation time and cannot be updated.
:param pulumi.Input[str] preset_name: The encoding preset name. This value is specified at creation time and cannot be updated.
"""
if encoding_type is not None:
pulumi.set(__self__, "encoding_type", encoding_type)
if preset_name is not None:
pulumi.set(__self__, "preset_name", preset_name)
@property
@pulumi.getter(name="encodingType")
def encoding_type(self) -> Optional[pulumi.Input[Union[str, 'LiveEventEncodingType']]]:
"""
The encoding type for Live Event. This value is specified at creation time and cannot be updated.
"""
return pulumi.get(self, "encoding_type")
@encoding_type.setter
def encoding_type(self, value: Optional[pulumi.Input[Union[str, 'LiveEventEncodingType']]]):
pulumi.set(self, "encoding_type", value)
@property
@pulumi.getter(name="presetName")
def preset_name(self) -> Optional[pulumi.Input[str]]:
"""
The encoding preset name. This value is specified at creation time and cannot be updated.
"""
return pulumi.get(self, "preset_name")
@preset_name.setter
def preset_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "preset_name", value)
@pulumi.input_type
class LiveEventEndpointArgs:
def __init__(__self__, *,
protocol: Optional[pulumi.Input[str]] = None,
url: Optional[pulumi.Input[str]] = None):
"""
The Live Event endpoint.
:param pulumi.Input[str] protocol: The endpoint protocol.
:param pulumi.Input[str] url: The endpoint URL.
"""
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if url is not None:
pulumi.set(__self__, "url", url)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[str]]:
"""
The endpoint protocol.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
"""
The endpoint URL.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url", value)
@pulumi.input_type
class LiveEventInputArgs:
def __init__(__self__, *,
streaming_protocol: pulumi.Input[Union[str, 'LiveEventInputProtocol']],
access_control: Optional[pulumi.Input['LiveEventInputAccessControlArgs']] = None,
access_token: Optional[pulumi.Input[str]] = None,
endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]] = None,
key_frame_interval_duration: Optional[pulumi.Input[str]] = None):
"""
The Live Event input.
:param pulumi.Input[Union[str, 'LiveEventInputProtocol']] streaming_protocol: The streaming protocol for the Live Event. This is specified at creation time and cannot be updated.
:param pulumi.Input['LiveEventInputAccessControlArgs'] access_control: The access control for LiveEvent Input.
:param pulumi.Input[str] access_token: A unique identifier for a stream. This can be specified at creation time but cannot be updated. If omitted, the service will generate a unique value.
:param pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]] endpoints: The input endpoints for the Live Event.
:param pulumi.Input[str] key_frame_interval_duration: ISO 8601 timespan duration of the key frame interval.
"""
pulumi.set(__self__, "streaming_protocol", streaming_protocol)
if access_control is not None:
pulumi.set(__self__, "access_control", access_control)
if access_token is not None:
pulumi.set(__self__, "access_token", access_token)
if endpoints is not None:
pulumi.set(__self__, "endpoints", endpoints)
if key_frame_interval_duration is not None:
pulumi.set(__self__, "key_frame_interval_duration", key_frame_interval_duration)
@property
@pulumi.getter(name="streamingProtocol")
def streaming_protocol(self) -> pulumi.Input[Union[str, 'LiveEventInputProtocol']]:
"""
The streaming protocol for the Live Event. This is specified at creation time and cannot be updated.
"""
return pulumi.get(self, "streaming_protocol")
@streaming_protocol.setter
def streaming_protocol(self, value: pulumi.Input[Union[str, 'LiveEventInputProtocol']]):
pulumi.set(self, "streaming_protocol", value)
@property
@pulumi.getter(name="accessControl")
def access_control(self) -> Optional[pulumi.Input['LiveEventInputAccessControlArgs']]:
"""
The access control for LiveEvent Input.
"""
return pulumi.get(self, "access_control")
@access_control.setter
def access_control(self, value: Optional[pulumi.Input['LiveEventInputAccessControlArgs']]):
pulumi.set(self, "access_control", value)
@property
@pulumi.getter(name="accessToken")
def access_token(self) -> Optional[pulumi.Input[str]]:
"""
A unique identifier for a stream. This can be specified at creation time but cannot be updated. If omitted, the service will generate a unique value.
"""
return pulumi.get(self, "access_token")
@access_token.setter
def access_token(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access_token", value)
@property
@pulumi.getter
def endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]:
"""
The input endpoints for the Live Event.
"""
return pulumi.get(self, "endpoints")
@endpoints.setter
def endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]):
pulumi.set(self, "endpoints", value)
@property
@pulumi.getter(name="keyFrameIntervalDuration")
def key_frame_interval_duration(self) -> Optional[pulumi.Input[str]]:
"""
ISO 8601 timespan duration of the key frame interval.
"""
return pulumi.get(self, "key_frame_interval_duration")
@key_frame_interval_duration.setter
def key_frame_interval_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval_duration", value)
@pulumi.input_type
class LiveEventInputAccessControlArgs:
def __init__(__self__, *,
ip: Optional[pulumi.Input['IPAccessControlArgs']] = None):
"""
The IP access control for Live Event Input.
:param pulumi.Input['IPAccessControlArgs'] ip: The IP access control properties.
"""
if ip is not None:
pulumi.set(__self__, "ip", ip)
@property
@pulumi.getter
def ip(self) -> Optional[pulumi.Input['IPAccessControlArgs']]:
"""
The IP access control properties.
"""
return pulumi.get(self, "ip")
@ip.setter
def ip(self, value: Optional[pulumi.Input['IPAccessControlArgs']]):
pulumi.set(self, "ip", value)
@pulumi.input_type
class LiveEventPreviewArgs:
def __init__(__self__, *,
access_control: Optional[pulumi.Input['LiveEventPreviewAccessControlArgs']] = None,
alternative_media_id: Optional[pulumi.Input[str]] = None,
endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]] = None,
preview_locator: Optional[pulumi.Input[str]] = None,
streaming_policy_name: Optional[pulumi.Input[str]] = None):
"""
The Live Event preview.
:param pulumi.Input['LiveEventPreviewAccessControlArgs'] access_control: The access control for LiveEvent preview.
:param pulumi.Input[str] alternative_media_id: An Alternative Media Identifier associated with the StreamingLocator created for the preview. This value is specified at creation time and cannot be updated. The identifier can be used in the CustomLicenseAcquisitionUrlTemplate or the CustomKeyAcquisitionUrlTemplate of the StreamingPolicy specified in the StreamingPolicyName field.
:param pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]] endpoints: The endpoints for preview.
:param pulumi.Input[str] preview_locator: The identifier of the preview locator in GUID format. Specifying this at creation time allows the caller to know the preview locator URL before the event is created. If omitted, the service will generate a random identifier. This value cannot be updated once the live event is created.
:param pulumi.Input[str] streaming_policy_name: The name of the streaming policy used for the LiveEvent preview. This value is specified at creation time and cannot be updated.
"""
if access_control is not None:
pulumi.set(__self__, "access_control", access_control)
if alternative_media_id is not None:
pulumi.set(__self__, "alternative_media_id", alternative_media_id)
if endpoints is not None:
pulumi.set(__self__, "endpoints", endpoints)
if preview_locator is not None:
pulumi.set(__self__, "preview_locator", preview_locator)
if streaming_policy_name is not None:
pulumi.set(__self__, "streaming_policy_name", streaming_policy_name)
@property
@pulumi.getter(name="accessControl")
def access_control(self) -> Optional[pulumi.Input['LiveEventPreviewAccessControlArgs']]:
"""
The access control for LiveEvent preview.
"""
return pulumi.get(self, "access_control")
@access_control.setter
def access_control(self, value: Optional[pulumi.Input['LiveEventPreviewAccessControlArgs']]):
pulumi.set(self, "access_control", value)
@property
@pulumi.getter(name="alternativeMediaId")
def alternative_media_id(self) -> Optional[pulumi.Input[str]]:
"""
An Alternative Media Identifier associated with the StreamingLocator created for the preview. This value is specified at creation time and cannot be updated. The identifier can be used in the CustomLicenseAcquisitionUrlTemplate or the CustomKeyAcquisitionUrlTemplate of the StreamingPolicy specified in the StreamingPolicyName field.
"""
return pulumi.get(self, "alternative_media_id")
@alternative_media_id.setter
def alternative_media_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alternative_media_id", value)
@property
@pulumi.getter
def endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]:
"""
The endpoints for preview.
"""
return pulumi.get(self, "endpoints")
@endpoints.setter
def endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]):
pulumi.set(self, "endpoints", value)
@property
@pulumi.getter(name="previewLocator")
def preview_locator(self) -> Optional[pulumi.Input[str]]:
"""
The identifier of the preview locator in GUID format. Specifying this at creation time allows the caller to know the preview locator URL before the event is created. If omitted, the service will generate a random identifier. This value cannot be updated once the live event is created.
"""
return pulumi.get(self, "preview_locator")
@preview_locator.setter
def preview_locator(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "preview_locator", value)
@property
@pulumi.getter(name="streamingPolicyName")
def streaming_policy_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the streaming policy used for the LiveEvent preview. This value is specified at creation time and cannot be updated.
"""
return pulumi.get(self, "streaming_policy_name")
@streaming_policy_name.setter
def streaming_policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "streaming_policy_name", value)
@pulumi.input_type
class LiveEventPreviewAccessControlArgs:
def __init__(__self__, *,
ip: Optional[pulumi.Input['IPAccessControlArgs']] = None):
"""
The IP access control for Live Event preview.
:param pulumi.Input['IPAccessControlArgs'] ip: The IP access control properties.
"""
if ip is not None:
pulumi.set(__self__, "ip", ip)
@property
@pulumi.getter
def ip(self) -> Optional[pulumi.Input['IPAccessControlArgs']]:
"""
The IP access control properties.
"""
return pulumi.get(self, "ip")
@ip.setter
def ip(self, value: Optional[pulumi.Input['IPAccessControlArgs']]):
pulumi.set(self, "ip", value)
@pulumi.input_type
class Mp4FormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str],
output_files: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]] = None):
"""
Describes the properties for an output ISO MP4 file.
:param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.Mp4Format'.
:param pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]] output_files: The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.MultiBitrateFormat')
if output_files is not None:
pulumi.set(__self__, "output_files", output_files)
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.Mp4Format'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="outputFiles")
def output_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]:
"""
The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
return pulumi.get(self, "output_files")
@output_files.setter
def output_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]):
pulumi.set(self, "output_files", value)
@pulumi.input_type
class MultiBitrateFormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str],
output_files: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]] = None):
"""
Describes the properties for producing a collection of GOP-aligned multi-bitrate files. The default behavior is to produce one output file for each video layer, muxed together with all the audio tracks. The exact output files produced can be controlled by specifying the outputFiles collection.
:param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.MultiBitrateFormat'.
:param pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]] output_files: The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.MultiBitrateFormat')
if output_files is not None:
pulumi.set(__self__, "output_files", output_files)
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.MultiBitrateFormat'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="outputFiles")
def output_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]:
"""
The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
return pulumi.get(self, "output_files")
@output_files.setter
def output_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]):
pulumi.set(self, "output_files", value)
@pulumi.input_type
class NoEncryptionArgs:
def __init__(__self__, *,
enabled_protocols: Optional[pulumi.Input['EnabledProtocolsArgs']] = None):
"""
Class for the NoEncryption scheme.
:param pulumi.Input['EnabledProtocolsArgs'] enabled_protocols: Represents the supported protocols.
"""
if enabled_protocols is not None:
pulumi.set(__self__, "enabled_protocols", enabled_protocols)
@property
@pulumi.getter(name="enabledProtocols")
def enabled_protocols(self) -> Optional[pulumi.Input['EnabledProtocolsArgs']]:
"""
Represents the supported protocols.
"""
return pulumi.get(self, "enabled_protocols")
@enabled_protocols.setter
def enabled_protocols(self, value: Optional[pulumi.Input['EnabledProtocolsArgs']]):
pulumi.set(self, "enabled_protocols", value)
@pulumi.input_type
class OutputFileArgs:
def __init__(__self__, *,
labels: pulumi.Input[Sequence[pulumi.Input[str]]]):
"""
Represents an output file produced.
:param pulumi.Input[Sequence[pulumi.Input[str]]] labels: The list of labels that describe how the encoder should multiplex video and audio into an output file. For example, if the encoder is producing two video layers with labels v1 and v2, and one audio layer with label a1, then an array like '[v1, a1]' tells the encoder to produce an output file with the video track represented by v1 and the audio track represented by a1.
"""
pulumi.set(__self__, "labels", labels)
@property
@pulumi.getter
def labels(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The list of labels that describe how the encoder should multiplex video and audio into an output file. For example, if the encoder is producing two video layers with labels v1 and v2, and one audio layer with label a1, then an array like '[v1, a1]' tells the encoder to produce an output file with the video track represented by v1 and the audio track represented by a1.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "labels", value)
@pulumi.input_type
class PngFormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str]):
"""
Describes the settings for producing PNG thumbnails.
:param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.PngFormat'.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.ImageFormat')
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video. {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.PngFormat'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@pulumi.input_type
class PngImageArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
start: pulumi.Input[str],
key_frame_interval: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
layers: Optional[pulumi.Input[Sequence[pulumi.Input['PngLayerArgs']]]] = None,
range: Optional[pulumi.Input[str]] = None,
step: Optional[pulumi.Input[str]] = None,
stretch_mode: Optional[pulumi.Input[Union[str, 'StretchMode']]] = None):
"""
Describes the properties for producing a series of PNG images from the input video.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.PngImage'.
:param pulumi.Input[str] start: The position in the input video from where to start generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT05S), a frame count (for example, 10 for the 10th frame), or a relative value (for example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
:param pulumi.Input[str] key_frame_interval: The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[Sequence[pulumi.Input['PngLayerArgs']]] layers: A collection of output PNG image layers to be produced by the encoder.
:param pulumi.Input[str] range: The position in the input video at which to stop generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT5M30S to stop at 5 minutes and 30 seconds), a frame count (for example, 300 to stop at the 300th frame), or a relative value (for example, 100%).
:param pulumi.Input[str] step: The intervals at which thumbnails are generated. The value can be an absolute timestamp (ISO 8601, e.g. PT05S for one image every 5 seconds), a frame count (for example, 30 for every 30 frames), or a relative value (for example, 1%).
:param pulumi.Input[Union[str, 'StretchMode']] stretch_mode: The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Video')
pulumi.set(__self__, "start", start)
if key_frame_interval is not None:
pulumi.set(__self__, "key_frame_interval", key_frame_interval)
if label is not None:
pulumi.set(__self__, "label", label)
if layers is not None:
pulumi.set(__self__, "layers", layers)
if range is not None:
pulumi.set(__self__, "range", range)
if step is not None:
pulumi.set(__self__, "step", step)
if stretch_mode is not None:
pulumi.set(__self__, "stretch_mode", stretch_mode)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.PngImage'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def start(self) -> pulumi.Input[str]:
"""
The position in the input video from where to start generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT05S), a frame count (for example, 10 for the 10th frame), or a relative value (for example, 1%). Also supports a macro {Best}, which tells the encoder to select the best thumbnail from the first few seconds of the video.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: pulumi.Input[str]):
pulumi.set(self, "start", value)
@property
@pulumi.getter(name="keyFrameInterval")
def key_frame_interval(self) -> Optional[pulumi.Input[str]]:
"""
The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
"""
return pulumi.get(self, "key_frame_interval")
@key_frame_interval.setter
def key_frame_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def layers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PngLayerArgs']]]]:
"""
A collection of output PNG image layers to be produced by the encoder.
"""
return pulumi.get(self, "layers")
@layers.setter
def layers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PngLayerArgs']]]]):
pulumi.set(self, "layers", value)
@property
@pulumi.getter
def range(self) -> Optional[pulumi.Input[str]]:
"""
The position in the input video at which to stop generating thumbnails. The value can be an absolute timestamp (ISO 8601, e.g. PT5M30S to stop at 5 minutes and 30 seconds), a frame count (for example, 300 to stop at the 300th frame), or a relative value (for example, 100%).
"""
return pulumi.get(self, "range")
@range.setter
def range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "range", value)
@property
@pulumi.getter
def step(self) -> Optional[pulumi.Input[str]]:
"""
The intervals at which thumbnails are generated. The value can be an absolute timestamp (ISO 8601, e.g. PT05S for one image every 5 seconds), a frame count (for example, 30 for every 30 frames), or a relative value (for example, 1%).
"""
return pulumi.get(self, "step")
@step.setter
def step(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "step", value)
@property
@pulumi.getter(name="stretchMode")
def stretch_mode(self) -> Optional[pulumi.Input[Union[str, 'StretchMode']]]:
"""
The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
return pulumi.get(self, "stretch_mode")
@stretch_mode.setter
def stretch_mode(self, value: Optional[pulumi.Input[Union[str, 'StretchMode']]]):
pulumi.set(self, "stretch_mode", value)
@pulumi.input_type
class PngLayerArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
height: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
width: Optional[pulumi.Input[str]] = None):
"""
Describes the settings to produce a PNG image from the input video.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.PngLayer'.
:param pulumi.Input[str] height: The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in height as the input.
:param pulumi.Input[str] label: The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
:param pulumi.Input[str] width: The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in width as the input.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.PngLayer')
if height is not None:
pulumi.set(__self__, "height", height)
if label is not None:
pulumi.set(__self__, "label", label)
if width is not None:
pulumi.set(__self__, "width", width)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.PngLayer'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def height(self) -> Optional[pulumi.Input[str]]:
"""
The height of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in height as the input.
"""
return pulumi.get(self, "height")
@height.setter
def height(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "height", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
The alphanumeric label for this layer, which can be used in multiplexing different video and audio layers, or in naming the output file.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter
def width(self) -> Optional[pulumi.Input[str]]:
"""
The width of the output video for this layer. The value can be absolute (in pixels) or relative (in percentage). For example, 50% means the output video has half as many pixels in width as the input.
"""
return pulumi.get(self, "width")
@width.setter
def width(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "width", value)
@pulumi.input_type
class PresentationTimeRangeArgs:
def __init__(__self__, *,
end_timestamp: Optional[pulumi.Input[float]] = None,
force_end_timestamp: Optional[pulumi.Input[bool]] = None,
live_backoff_duration: Optional[pulumi.Input[float]] = None,
presentation_window_duration: Optional[pulumi.Input[float]] = None,
start_timestamp: Optional[pulumi.Input[float]] = None,
timescale: Optional[pulumi.Input[float]] = None):
"""
The presentation time range. This is asset-related and not recommended for Account Filter.
:param pulumi.Input[float] end_timestamp: The absolute end time boundary.
:param pulumi.Input[bool] force_end_timestamp: Indicates whether the end timestamp must be present.
:param pulumi.Input[float] live_backoff_duration: The live back-off duration, relative to the live (right) edge.
:param pulumi.Input[float] presentation_window_duration: The sliding-window duration, relative to the end.
:param pulumi.Input[float] start_timestamp: The absolute start time boundary.
:param pulumi.Input[float] timescale: The timescale of the timestamps (units per second).
"""
if end_timestamp is not None:
pulumi.set(__self__, "end_timestamp", end_timestamp)
if force_end_timestamp is not None:
pulumi.set(__self__, "force_end_timestamp", force_end_timestamp)
if live_backoff_duration is not None:
pulumi.set(__self__, "live_backoff_duration", live_backoff_duration)
if presentation_window_duration is not None:
pulumi.set(__self__, "presentation_window_duration", presentation_window_duration)
if start_timestamp is not None:
pulumi.set(__self__, "start_timestamp", start_timestamp)
if timescale is not None:
pulumi.set(__self__, "timescale", timescale)
@property
@pulumi.getter(name="endTimestamp")
def end_timestamp(self) -> Optional[pulumi.Input[float]]:
"""
The absolute end time boundary.
"""
return pulumi.get(self, "end_timestamp")
@end_timestamp.setter
def end_timestamp(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "end_timestamp", value)
@property
@pulumi.getter(name="forceEndTimestamp")
def force_end_timestamp(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether the end timestamp must be present.
"""
return pulumi.get(self, "force_end_timestamp")
@force_end_timestamp.setter
def force_end_timestamp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_end_timestamp", value)
@property
@pulumi.getter(name="liveBackoffDuration")
def live_backoff_duration(self) -> Optional[pulumi.Input[float]]:
"""
The live back-off duration, relative to the live (right) edge.
"""
return pulumi.get(self, "live_backoff_duration")
@live_backoff_duration.setter
def live_backoff_duration(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "live_backoff_duration", value)
@property
@pulumi.getter(name="presentationWindowDuration")
def presentation_window_duration(self) -> Optional[pulumi.Input[float]]:
"""
The sliding-window duration, relative to the end.
"""
return pulumi.get(self, "presentation_window_duration")
@presentation_window_duration.setter
def presentation_window_duration(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "presentation_window_duration", value)
@property
@pulumi.getter(name="startTimestamp")
def start_timestamp(self) -> Optional[pulumi.Input[float]]:
"""
The absolute start time boundary.
"""
return pulumi.get(self, "start_timestamp")
@start_timestamp.setter
def start_timestamp(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "start_timestamp", value)
@property
@pulumi.getter
def timescale(self) -> Optional[pulumi.Input[float]]:
"""
The timescale of the timestamps (units per second).
"""
return pulumi.get(self, "timescale")
@timescale.setter
def timescale(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "timescale", value)
@pulumi.input_type
class RectangleArgs:
def __init__(__self__, *,
height: Optional[pulumi.Input[str]] = None,
left: Optional[pulumi.Input[str]] = None,
top: Optional[pulumi.Input[str]] = None,
width: Optional[pulumi.Input[str]] = None):
"""
Describes the properties of a rectangular window applied to the input media before processing it.
:param pulumi.Input[str] height: The height of the rectangular region in pixels. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
:param pulumi.Input[str] left: The number of pixels from the left margin. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
:param pulumi.Input[str] top: The number of pixels from the top margin. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
:param pulumi.Input[str] width: The width of the rectangular region in pixels. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
"""
if height is not None:
pulumi.set(__self__, "height", height)
if left is not None:
pulumi.set(__self__, "left", left)
if top is not None:
pulumi.set(__self__, "top", top)
if width is not None:
pulumi.set(__self__, "width", width)
@property
@pulumi.getter
def height(self) -> Optional[pulumi.Input[str]]:
"""
The height of the rectangular region in pixels. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
"""
return pulumi.get(self, "height")
@height.setter
def height(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "height", value)
@property
@pulumi.getter
def left(self) -> Optional[pulumi.Input[str]]:
"""
The number of pixels from the left margin. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
"""
return pulumi.get(self, "left")
@left.setter
def left(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "left", value)
@property
@pulumi.getter
def top(self) -> Optional[pulumi.Input[str]]:
"""
The number of pixels from the top margin. This can be an absolute pixel value (e.g. 100), or relative to the size of the video (for example, 50%).
"""
return pulumi.get(self, "top")
@top.setter
def top(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "top", value)
@property
@pulumi.getter
def width(self) -> Optional[pulumi.Input[str]]:
"""
The width of the rectangular region in pixels. This can be an absolute pixel value (e.g., 100) or relative to the size of the video (for example, 50%).
"""
return pulumi.get(self, "width")
@width.setter
def width(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "width", value)
@pulumi.input_type
class StandardEncoderPresetArgs:
def __init__(__self__, *,
codecs: pulumi.Input[Sequence[pulumi.Input[Union['AacAudioArgs', 'AudioArgs', 'CopyAudioArgs', 'CopyVideoArgs', 'H264VideoArgs', 'ImageArgs', 'JpgImageArgs', 'PngImageArgs', 'VideoArgs']]]],
formats: pulumi.Input[Sequence[pulumi.Input[Union['ImageFormatArgs', 'JpgFormatArgs', 'Mp4FormatArgs', 'MultiBitrateFormatArgs', 'PngFormatArgs', 'TransportStreamFormatArgs']]]],
odata_type: pulumi.Input[str],
filters: Optional[pulumi.Input['FiltersArgs']] = None):
"""
Describes all the settings to be used when encoding the input video with the Standard Encoder.
:param pulumi.Input[Sequence[pulumi.Input[Union['AacAudioArgs', 'AudioArgs', 'CopyAudioArgs', 'CopyVideoArgs', 'H264VideoArgs', 'ImageArgs', 'JpgImageArgs', 'PngImageArgs', 'VideoArgs']]]] codecs: The list of codecs to be used when encoding the input video.
:param pulumi.Input[Sequence[pulumi.Input[Union['ImageFormatArgs', 'JpgFormatArgs', 'Mp4FormatArgs', 'MultiBitrateFormatArgs', 'PngFormatArgs', 'TransportStreamFormatArgs']]]] formats: The list of outputs to be produced by the encoder.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.StandardEncoderPreset'.
:param pulumi.Input['FiltersArgs'] filters: One or more filtering operations that are applied to the input media before encoding.
"""
pulumi.set(__self__, "codecs", codecs)
pulumi.set(__self__, "formats", formats)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.StandardEncoderPreset')
if filters is not None:
pulumi.set(__self__, "filters", filters)
@property
@pulumi.getter
def codecs(self) -> pulumi.Input[Sequence[pulumi.Input[Union['AacAudioArgs', 'AudioArgs', 'CopyAudioArgs', 'CopyVideoArgs', 'H264VideoArgs', 'ImageArgs', 'JpgImageArgs', 'PngImageArgs', 'VideoArgs']]]]:
"""
The list of codecs to be used when encoding the input video.
"""
return pulumi.get(self, "codecs")
@codecs.setter
def codecs(self, value: pulumi.Input[Sequence[pulumi.Input[Union['AacAudioArgs', 'AudioArgs', 'CopyAudioArgs', 'CopyVideoArgs', 'H264VideoArgs', 'ImageArgs', 'JpgImageArgs', 'PngImageArgs', 'VideoArgs']]]]):
pulumi.set(self, "codecs", value)
@property
@pulumi.getter
def formats(self) -> pulumi.Input[Sequence[pulumi.Input[Union['ImageFormatArgs', 'JpgFormatArgs', 'Mp4FormatArgs', 'MultiBitrateFormatArgs', 'PngFormatArgs', 'TransportStreamFormatArgs']]]]:
"""
The list of outputs to be produced by the encoder.
"""
return pulumi.get(self, "formats")
@formats.setter
def formats(self, value: pulumi.Input[Sequence[pulumi.Input[Union['ImageFormatArgs', 'JpgFormatArgs', 'Mp4FormatArgs', 'MultiBitrateFormatArgs', 'PngFormatArgs', 'TransportStreamFormatArgs']]]]):
pulumi.set(self, "formats", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.StandardEncoderPreset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter
def filters(self) -> Optional[pulumi.Input['FiltersArgs']]:
"""
One or more filtering operations that are applied to the input media before encoding.
"""
return pulumi.get(self, "filters")
@filters.setter
def filters(self, value: Optional[pulumi.Input['FiltersArgs']]):
pulumi.set(self, "filters", value)
@pulumi.input_type
class StorageAccountArgs:
def __init__(__self__, *,
type: pulumi.Input[Union[str, 'StorageAccountType']],
id: Optional[pulumi.Input[str]] = None):
"""
The storage account details.
:param pulumi.Input[Union[str, 'StorageAccountType']] type: The type of the storage account.
:param pulumi.Input[str] id: The ID of the storage account resource. Media Services relies on tables and queues as well as blobs, so the primary storage account must be a Standard Storage account (either Microsoft.ClassicStorage or Microsoft.Storage). Blob-only storage accounts can be added as secondary storage accounts.
"""
pulumi.set(__self__, "type", type)
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def type(self) -> pulumi.Input[Union[str, 'StorageAccountType']]:
"""
The type of the storage account.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[Union[str, 'StorageAccountType']]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the storage account resource. Media Services relies on tables and queues as well as blobs, so the primary storage account must be a Standard Storage account (either Microsoft.ClassicStorage or Microsoft.Storage). Blob-only storage accounts can be added as secondary storage accounts.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@pulumi.input_type
class StreamingEndpointAccessControlArgs:
def __init__(__self__, *,
akamai: Optional[pulumi.Input['AkamaiAccessControlArgs']] = None,
ip: Optional[pulumi.Input['IPAccessControlArgs']] = None):
"""
StreamingEndpoint access control definition.
:param pulumi.Input['AkamaiAccessControlArgs'] akamai: The access control of Akamai.
:param pulumi.Input['IPAccessControlArgs'] ip: The IP access control of the StreamingEndpoint.
"""
if akamai is not None:
pulumi.set(__self__, "akamai", akamai)
if ip is not None:
pulumi.set(__self__, "ip", ip)
@property
@pulumi.getter
def akamai(self) -> Optional[pulumi.Input['AkamaiAccessControlArgs']]:
"""
The access control of Akamai.
"""
return pulumi.get(self, "akamai")
@akamai.setter
def akamai(self, value: Optional[pulumi.Input['AkamaiAccessControlArgs']]):
pulumi.set(self, "akamai", value)
@property
@pulumi.getter
def ip(self) -> Optional[pulumi.Input['IPAccessControlArgs']]:
"""
The IP access control of the StreamingEndpoint.
"""
return pulumi.get(self, "ip")
@ip.setter
def ip(self, value: Optional[pulumi.Input['IPAccessControlArgs']]):
pulumi.set(self, "ip", value)
@pulumi.input_type
class StreamingLocatorContentKeyArgs:
def __init__(__self__, *,
id: pulumi.Input[str],
label_reference_in_streaming_policy: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None):
"""
Class for content key in Streaming Locator
:param pulumi.Input[str] id: ID of Content Key
:param pulumi.Input[str] label_reference_in_streaming_policy: Label of Content Key as specified in the Streaming Policy
:param pulumi.Input[str] value: Value of Content Key
"""
pulumi.set(__self__, "id", id)
if label_reference_in_streaming_policy is not None:
pulumi.set(__self__, "label_reference_in_streaming_policy", label_reference_in_streaming_policy)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def id(self) -> pulumi.Input[str]:
"""
ID of Content Key
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: pulumi.Input[str]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="labelReferenceInStreamingPolicy")
def label_reference_in_streaming_policy(self) -> Optional[pulumi.Input[str]]:
"""
Label of Content Key as specified in the Streaming Policy
"""
return pulumi.get(self, "label_reference_in_streaming_policy")
@label_reference_in_streaming_policy.setter
def label_reference_in_streaming_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label_reference_in_streaming_policy", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[str]]:
"""
Value of Content Key
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value", value)
@pulumi.input_type
class StreamingPolicyContentKeyArgs:
def __init__(__self__, *,
label: Optional[pulumi.Input[str]] = None,
policy_name: Optional[pulumi.Input[str]] = None,
tracks: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]] = None):
"""
Class to specify properties of content key
:param pulumi.Input[str] label: Label can be used to specify Content Key when creating a Streaming Locator
:param pulumi.Input[str] policy_name: Policy used by Content Key
:param pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]] tracks: Tracks which use this content key
"""
if label is not None:
pulumi.set(__self__, "label", label)
if policy_name is not None:
pulumi.set(__self__, "policy_name", policy_name)
if tracks is not None:
pulumi.set(__self__, "tracks", tracks)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
Label can be used to specify Content Key when creating a Streaming Locator
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter(name="policyName")
def policy_name(self) -> Optional[pulumi.Input[str]]:
"""
Policy used by Content Key
"""
return pulumi.get(self, "policy_name")
@policy_name.setter
def policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_name", value)
@property
@pulumi.getter
def tracks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]:
"""
Tracks which use this content key
"""
return pulumi.get(self, "tracks")
@tracks.setter
def tracks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TrackSelectionArgs']]]]):
pulumi.set(self, "tracks", value)
@pulumi.input_type
class StreamingPolicyContentKeysArgs:
def __init__(__self__, *,
default_key: Optional[pulumi.Input['DefaultKeyArgs']] = None,
key_to_track_mappings: Optional[pulumi.Input[Sequence[pulumi.Input['StreamingPolicyContentKeyArgs']]]] = None):
"""
Class to specify properties of all content keys in Streaming Policy
:param pulumi.Input['DefaultKeyArgs'] default_key: Default content key for an encryption scheme
:param pulumi.Input[Sequence[pulumi.Input['StreamingPolicyContentKeyArgs']]] key_to_track_mappings: Mapping of tracks that require a separate content key.
"""
if default_key is not None:
pulumi.set(__self__, "default_key", default_key)
if key_to_track_mappings is not None:
pulumi.set(__self__, "key_to_track_mappings", key_to_track_mappings)
@property
@pulumi.getter(name="defaultKey")
def default_key(self) -> Optional[pulumi.Input['DefaultKeyArgs']]:
"""
Default content key for an encryption scheme
"""
return pulumi.get(self, "default_key")
@default_key.setter
def default_key(self, value: Optional[pulumi.Input['DefaultKeyArgs']]):
pulumi.set(self, "default_key", value)
@property
@pulumi.getter(name="keyToTrackMappings")
def key_to_track_mappings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['StreamingPolicyContentKeyArgs']]]]:
"""
Mapping of tracks that require a separate content key.
"""
return pulumi.get(self, "key_to_track_mappings")
@key_to_track_mappings.setter
def key_to_track_mappings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['StreamingPolicyContentKeyArgs']]]]):
pulumi.set(self, "key_to_track_mappings", value)
@pulumi.input_type
class StreamingPolicyFairPlayConfigurationArgs:
def __init__(__self__, *,
allow_persistent_license: pulumi.Input[bool],
custom_license_acquisition_url_template: Optional[pulumi.Input[str]] = None):
"""
Class to specify configurations of FairPlay in Streaming Policy
:param pulumi.Input[bool] allow_persistent_license: Whether the licenses are allowed to be persistent or not.
:param pulumi.Input[str] custom_license_acquisition_url_template: Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
pulumi.set(__self__, "allow_persistent_license", allow_persistent_license)
if custom_license_acquisition_url_template is not None:
pulumi.set(__self__, "custom_license_acquisition_url_template", custom_license_acquisition_url_template)
@property
@pulumi.getter(name="allowPersistentLicense")
def allow_persistent_license(self) -> pulumi.Input[bool]:
"""
Whether the licenses are allowed to be persistent or not.
"""
return pulumi.get(self, "allow_persistent_license")
@allow_persistent_license.setter
def allow_persistent_license(self, value: pulumi.Input[bool]):
pulumi.set(self, "allow_persistent_license", value)
@property
@pulumi.getter(name="customLicenseAcquisitionUrlTemplate")
def custom_license_acquisition_url_template(self) -> Optional[pulumi.Input[str]]:
"""
Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
return pulumi.get(self, "custom_license_acquisition_url_template")
@custom_license_acquisition_url_template.setter
def custom_license_acquisition_url_template(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_license_acquisition_url_template", value)
@pulumi.input_type
class StreamingPolicyPlayReadyConfigurationArgs:
def __init__(__self__, *,
custom_license_acquisition_url_template: Optional[pulumi.Input[str]] = None,
play_ready_custom_attributes: Optional[pulumi.Input[str]] = None):
"""
Class to specify configurations of PlayReady in Streaming Policy
:param pulumi.Input[str] custom_license_acquisition_url_template: Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
:param pulumi.Input[str] play_ready_custom_attributes: Custom attributes for PlayReady
"""
if custom_license_acquisition_url_template is not None:
pulumi.set(__self__, "custom_license_acquisition_url_template", custom_license_acquisition_url_template)
if play_ready_custom_attributes is not None:
pulumi.set(__self__, "play_ready_custom_attributes", play_ready_custom_attributes)
@property
@pulumi.getter(name="customLicenseAcquisitionUrlTemplate")
def custom_license_acquisition_url_template(self) -> Optional[pulumi.Input[str]]:
"""
Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
return pulumi.get(self, "custom_license_acquisition_url_template")
@custom_license_acquisition_url_template.setter
def custom_license_acquisition_url_template(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_license_acquisition_url_template", value)
@property
@pulumi.getter(name="playReadyCustomAttributes")
def play_ready_custom_attributes(self) -> Optional[pulumi.Input[str]]:
"""
Custom attributes for PlayReady
"""
return pulumi.get(self, "play_ready_custom_attributes")
@play_ready_custom_attributes.setter
def play_ready_custom_attributes(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "play_ready_custom_attributes", value)
@pulumi.input_type
class StreamingPolicyWidevineConfigurationArgs:
def __init__(__self__, *,
custom_license_acquisition_url_template: Optional[pulumi.Input[str]] = None):
"""
Class to specify configurations of Widevine in Streaming Policy
:param pulumi.Input[str] custom_license_acquisition_url_template: Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
if custom_license_acquisition_url_template is not None:
pulumi.set(__self__, "custom_license_acquisition_url_template", custom_license_acquisition_url_template)
@property
@pulumi.getter(name="customLicenseAcquisitionUrlTemplate")
def custom_license_acquisition_url_template(self) -> Optional[pulumi.Input[str]]:
"""
Template for the URL of the custom service delivering licenses to end-user players. Not required when using Azure Media Services for issuing licenses. The template supports replaceable tokens that the service will update at runtime with the value specific to the request. The currently supported token values are {AlternativeMediaId}, which is replaced with the value of StreamingLocatorId.AlternativeMediaId, and {ContentKeyId}, which is replaced with the value of the identifier of the key being requested.
"""
return pulumi.get(self, "custom_license_acquisition_url_template")
@custom_license_acquisition_url_template.setter
def custom_license_acquisition_url_template(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_license_acquisition_url_template", value)
@pulumi.input_type
class TrackPropertyConditionArgs:
def __init__(__self__, *,
operation: pulumi.Input[Union[str, 'TrackPropertyCompareOperation']],
property: pulumi.Input[Union[str, 'TrackPropertyType']],
value: Optional[pulumi.Input[str]] = None):
"""
Class to specify one track property condition
:param pulumi.Input[Union[str, 'TrackPropertyCompareOperation']] operation: Track property condition operation
:param pulumi.Input[Union[str, 'TrackPropertyType']] property: Track property type
:param pulumi.Input[str] value: Track property value
"""
pulumi.set(__self__, "operation", operation)
pulumi.set(__self__, "property", property)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def operation(self) -> pulumi.Input[Union[str, 'TrackPropertyCompareOperation']]:
"""
Track property condition operation
"""
return pulumi.get(self, "operation")
@operation.setter
def operation(self, value: pulumi.Input[Union[str, 'TrackPropertyCompareOperation']]):
pulumi.set(self, "operation", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[str]]:
"""
Track property value
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value", value)
@property
@pulumi.getter
def property(self) -> pulumi.Input[Union[str, 'TrackPropertyType']]:
"""
Track property type
"""
return pulumi.get(self, "property")
@property.setter
def property(self, value: pulumi.Input[Union[str, 'TrackPropertyType']]):
pulumi.set(self, "property", value)
@pulumi.input_type
class TrackSelectionArgs:
def __init__(__self__, *,
track_selections: Optional[pulumi.Input[Sequence[pulumi.Input['TrackPropertyConditionArgs']]]] = None):
"""
Class to select a track
:param pulumi.Input[Sequence[pulumi.Input['TrackPropertyConditionArgs']]] track_selections: TrackSelections is a list of track property conditions that can specify the track(s).
"""
if track_selections is not None:
pulumi.set(__self__, "track_selections", track_selections)
@property
@pulumi.getter(name="trackSelections")
def track_selections(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TrackPropertyConditionArgs']]]]:
"""
TrackSelections is a list of track property conditions that can specify the track(s).
"""
return pulumi.get(self, "track_selections")
@track_selections.setter
def track_selections(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TrackPropertyConditionArgs']]]]):
pulumi.set(self, "track_selections", value)
@pulumi.input_type
class TransformOutputArgs:
def __init__(__self__, *,
preset: pulumi.Input[Union['AudioAnalyzerPresetArgs', 'BuiltInStandardEncoderPresetArgs', 'FaceDetectorPresetArgs', 'StandardEncoderPresetArgs', 'VideoAnalyzerPresetArgs']],
on_error: Optional[pulumi.Input[Union[str, 'OnErrorType']]] = None,
relative_priority: Optional[pulumi.Input[Union[str, 'Priority']]] = None):
"""
Describes the properties of a TransformOutput, which are the rules to be applied while generating the desired output.
:param pulumi.Input[Union['AudioAnalyzerPresetArgs', 'BuiltInStandardEncoderPresetArgs', 'FaceDetectorPresetArgs', 'StandardEncoderPresetArgs', 'VideoAnalyzerPresetArgs']] preset: Preset that describes the operations that will be used to modify, transcode, or extract insights from the source file to generate the output.
:param pulumi.Input[Union[str, 'OnErrorType']] on_error: A Transform can define more than one output. This property defines what the service should do when one output fails: either continue to produce the other outputs, or stop them. The overall Job state will not reflect failures of outputs that are specified with 'ContinueJob'. The default is 'StopProcessingJob'.
:param pulumi.Input[Union[str, 'Priority']] relative_priority: Sets the relative priority of the TransformOutputs within a Transform. This sets the priority that the service uses for processing TransformOutputs. The default priority is Normal.
"""
pulumi.set(__self__, "preset", preset)
if on_error is not None:
pulumi.set(__self__, "on_error", on_error)
if relative_priority is not None:
pulumi.set(__self__, "relative_priority", relative_priority)
@property
@pulumi.getter
def preset(self) -> pulumi.Input[Union['AudioAnalyzerPresetArgs', 'BuiltInStandardEncoderPresetArgs', 'FaceDetectorPresetArgs', 'StandardEncoderPresetArgs', 'VideoAnalyzerPresetArgs']]:
"""
Preset that describes the operations that will be used to modify, transcode, or extract insights from the source file to generate the output.
"""
return pulumi.get(self, "preset")
@preset.setter
def preset(self, value: pulumi.Input[Union['AudioAnalyzerPresetArgs', 'BuiltInStandardEncoderPresetArgs', 'FaceDetectorPresetArgs', 'StandardEncoderPresetArgs', 'VideoAnalyzerPresetArgs']]):
pulumi.set(self, "preset", value)
@property
@pulumi.getter(name="onError")
def on_error(self) -> Optional[pulumi.Input[Union[str, 'OnErrorType']]]:
"""
A Transform can define more than one output. This property defines what the service should do when one output fails: either continue to produce the other outputs, or stop them. The overall Job state will not reflect failures of outputs that are specified with 'ContinueJob'. The default is 'StopProcessingJob'.
"""
return pulumi.get(self, "on_error")
@on_error.setter
def on_error(self, value: Optional[pulumi.Input[Union[str, 'OnErrorType']]]):
pulumi.set(self, "on_error", value)
@property
@pulumi.getter(name="relativePriority")
def relative_priority(self) -> Optional[pulumi.Input[Union[str, 'Priority']]]:
"""
Sets the relative priority of the TransformOutputs within a Transform. This sets the priority that the service uses for processing TransformOutputs. The default priority is Normal.
"""
return pulumi.get(self, "relative_priority")
@relative_priority.setter
def relative_priority(self, value: Optional[pulumi.Input[Union[str, 'Priority']]]):
pulumi.set(self, "relative_priority", value)
@pulumi.input_type
class TransportStreamFormatArgs:
def __init__(__self__, *,
filename_pattern: pulumi.Input[str],
odata_type: pulumi.Input[str],
output_files: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]] = None):
"""
Describes the properties for generating an MPEG-2 Transport Stream (ISO/IEC 13818-1) output video file(s).
:param pulumi.Input[str] filename_pattern: The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.TransportStreamFormat'.
:param pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]] output_files: The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
pulumi.set(__self__, "filename_pattern", filename_pattern)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.MultiBitrateFormat')
if output_files is not None:
pulumi.set(__self__, "output_files", output_files)
@property
@pulumi.getter(name="filenamePattern")
def filename_pattern(self) -> pulumi.Input[str]:
"""
The pattern of the file names for the generated output files. The following macros are supported in the file name: {Basename} - The base name of the input video {Extension} - The appropriate extension for this format. {Label} - The label assigned to the codec/layer. {Index} - A unique index for thumbnails. Only applicable to thumbnails. {Bitrate} - The audio/video bitrate. Not applicable to thumbnails. {Codec} - The type of the audio/video codec. Any unsubstituted macros will be collapsed and removed from the filename.
"""
return pulumi.get(self, "filename_pattern")
@filename_pattern.setter
def filename_pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "filename_pattern", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.TransportStreamFormat'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="outputFiles")
def output_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]:
"""
The list of output files to produce. Each entry in the list is a set of audio and video layer labels to be muxed together.
"""
return pulumi.get(self, "output_files")
@output_files.setter
def output_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OutputFileArgs']]]]):
pulumi.set(self, "output_files", value)
@pulumi.input_type
class VideoArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
key_frame_interval: Optional[pulumi.Input[str]] = None,
label: Optional[pulumi.Input[str]] = None,
stretch_mode: Optional[pulumi.Input[Union[str, 'StretchMode']]] = None):
"""
Describes the basic properties for encoding the input video.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.Video'.
:param pulumi.Input[str] key_frame_interval: The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
:param pulumi.Input[str] label: An optional label for the codec. The label can be used to control muxing behavior.
:param pulumi.Input[Union[str, 'StretchMode']] stretch_mode: The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.Video')
if key_frame_interval is not None:
pulumi.set(__self__, "key_frame_interval", key_frame_interval)
if label is not None:
pulumi.set(__self__, "label", label)
if stretch_mode is not None:
pulumi.set(__self__, "stretch_mode", stretch_mode)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.Video'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="keyFrameInterval")
def key_frame_interval(self) -> Optional[pulumi.Input[str]]:
"""
The distance between two key frames, thereby defining a group of pictures (GOP). The value should be a non-zero integer in the range [1, 30] seconds, specified in ISO 8601 format. The default is 2 seconds (PT2S).
"""
return pulumi.get(self, "key_frame_interval")
@key_frame_interval.setter
def key_frame_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_frame_interval", value)
@property
@pulumi.getter
def label(self) -> Optional[pulumi.Input[str]]:
"""
An optional label for the codec. The label can be used to control muxing behavior.
"""
return pulumi.get(self, "label")
@label.setter
def label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "label", value)
@property
@pulumi.getter(name="stretchMode")
def stretch_mode(self) -> Optional[pulumi.Input[Union[str, 'StretchMode']]]:
"""
The resizing mode - how the input video will be resized to fit the desired output resolution(s). Default is AutoSize.
"""
return pulumi.get(self, "stretch_mode")
@stretch_mode.setter
def stretch_mode(self, value: Optional[pulumi.Input[Union[str, 'StretchMode']]]):
pulumi.set(self, "stretch_mode", value)
@pulumi.input_type
class VideoAnalyzerPresetArgs:
def __init__(__self__, *,
odata_type: pulumi.Input[str],
audio_language: Optional[pulumi.Input[str]] = None,
experimental_options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
insights_to_extract: Optional[pulumi.Input[Union[str, 'InsightsType']]] = None):
"""
A video analyzer preset that extracts insights (rich metadata) from both audio and video, and outputs a JSON format file.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.VideoAnalyzerPreset'.
:param pulumi.Input[str] audio_language: The language for the audio payload in the input, using the BCP-47 format of 'language tag-region' (e.g., 'en-US'). If you know the language of your content, it is recommended that you specify it. If the language isn't specified or set to null, automatic language detection will choose the first language detected and process with the selected language for the duration of the file. It does not currently support dynamically switching between languages after the first language is detected. The automatic detection works best with audio recordings with clearly discernible speech. If automatic detection fails to find the language, transcription falls back to 'en-US'. The list of supported languages is available here: https://go.microsoft.com/fwlink/?linkid=2109463
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] experimental_options: Dictionary containing key value pairs for parameters not exposed in the preset itself
:param pulumi.Input[Union[str, 'InsightsType']] insights_to_extract: Defines the type of insights that you want the service to generate. The allowed values are 'AudioInsightsOnly', 'VideoInsightsOnly', and 'AllInsights'. The default is AllInsights. If you set this to AllInsights and the input is audio only, then only audio insights are generated; similarly, if the input is video only, then only video insights are generated. It is recommended not to use AudioInsightsOnly if you expect some of your inputs to be video only, or VideoInsightsOnly if you expect some of your inputs to be audio only; Jobs in such conditions would error out.
"""
pulumi.set(__self__, "odata_type", '#Microsoft.Media.AudioAnalyzerPreset')
if audio_language is not None:
pulumi.set(__self__, "audio_language", audio_language)
if experimental_options is not None:
pulumi.set(__self__, "experimental_options", experimental_options)
if insights_to_extract is not None:
pulumi.set(__self__, "insights_to_extract", insights_to_extract)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.VideoAnalyzerPreset'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="audioLanguage")
def audio_language(self) -> Optional[pulumi.Input[str]]:
"""
The language for the audio payload in the input, using the BCP-47 format of 'language tag-region' (e.g., 'en-US'). If you know the language of your content, it is recommended that you specify it. If the language isn't specified or set to null, automatic language detection will choose the first language detected and process with the selected language for the duration of the file. It does not currently support dynamically switching between languages after the first language is detected. The automatic detection works best with audio recordings with clearly discernible speech. If automatic detection fails to find the language, transcription falls back to 'en-US'. The list of supported languages is available here: https://go.microsoft.com/fwlink/?linkid=2109463
"""
return pulumi.get(self, "audio_language")
@audio_language.setter
def audio_language(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "audio_language", value)
@property
@pulumi.getter(name="experimentalOptions")
def experimental_options(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Dictionary containing key value pairs for parameters not exposed in the preset itself
"""
return pulumi.get(self, "experimental_options")
@experimental_options.setter
def experimental_options(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "experimental_options", value)
@property
@pulumi.getter(name="insightsToExtract")
def insights_to_extract(self) -> Optional[pulumi.Input[Union[str, 'InsightsType']]]:
"""
Defines the type of insights that you want the service to generate. The allowed values are 'AudioInsightsOnly', 'VideoInsightsOnly', and 'AllInsights'. The default is AllInsights. If you set this to AllInsights and the input is audio only, then only audio insights are generated; similarly, if the input is video only, then only video insights are generated. It is recommended not to use AudioInsightsOnly if you expect some of your inputs to be video only, or VideoInsightsOnly if you expect some of your inputs to be audio only; Jobs in such conditions would error out.
"""
return pulumi.get(self, "insights_to_extract")
@insights_to_extract.setter
def insights_to_extract(self, value: Optional[pulumi.Input[Union[str, 'InsightsType']]]):
pulumi.set(self, "insights_to_extract", value)
@pulumi.input_type
class VideoOverlayArgs:
def __init__(__self__, *,
input_label: pulumi.Input[str],
odata_type: pulumi.Input[str],
audio_gain_level: Optional[pulumi.Input[float]] = None,
crop_rectangle: Optional[pulumi.Input['RectangleArgs']] = None,
end: Optional[pulumi.Input[str]] = None,
fade_in_duration: Optional[pulumi.Input[str]] = None,
fade_out_duration: Optional[pulumi.Input[str]] = None,
opacity: Optional[pulumi.Input[float]] = None,
position: Optional[pulumi.Input['RectangleArgs']] = None,
start: Optional[pulumi.Input[str]] = None):
"""
Describes the properties of a video overlay.
:param pulumi.Input[str] input_label: The label of the job input which is to be used as an overlay. The Input must specify exactly one file. You can specify an image file in JPG or PNG formats, or an audio file (such as a WAV, MP3, WMA or M4A file), or a video file. See https://aka.ms/mesformats for the complete list of supported audio and video file formats.
:param pulumi.Input[str] odata_type: The discriminator for derived types.
Expected value is '#Microsoft.Media.VideoOverlay'.
:param pulumi.Input[float] audio_gain_level: The gain level of audio in the overlay. The value should be in the range [0, 1.0]. The default is 1.0.
:param pulumi.Input['RectangleArgs'] crop_rectangle: An optional rectangular window used to crop the overlay image or video.
:param pulumi.Input[str] end: The position in the input video at which the overlay ends. The value should be in ISO 8601 duration format. For example, PT30S to end the overlay 30 seconds into the input video. If not specified, the overlay will be applied until the end of the input video if inputLoop is true. Else, if inputLoop is false, then the overlay will last as long as the duration of the overlay media.
:param pulumi.Input[str] fade_in_duration: The duration over which the overlay fades in onto the input video. The value should be in ISO 8601 duration format. If not specified, the default behavior is to have no fade in (same as PT0S).
:param pulumi.Input[str] fade_out_duration: The duration over which the overlay fades out of the input video. The value should be in ISO 8601 duration format. If not specified, the default behavior is to have no fade out (same as PT0S).
:param pulumi.Input[float] opacity: The opacity of the overlay. This is a value in the range [0, 1.0]. Default is 1.0, which means the overlay is opaque.
:param pulumi.Input['RectangleArgs'] position: The location in the input video where the overlay is applied.
:param pulumi.Input[str] start: The start position, with reference to the input video, at which the overlay starts. The value should be in ISO 8601 duration format. For example, PT05S to start the overlay 5 seconds into the input video. If not specified, the overlay starts from the beginning of the input video.
"""
pulumi.set(__self__, "input_label", input_label)
pulumi.set(__self__, "odata_type", '#Microsoft.Media.VideoOverlay')
if audio_gain_level is not None:
pulumi.set(__self__, "audio_gain_level", audio_gain_level)
if crop_rectangle is not None:
pulumi.set(__self__, "crop_rectangle", crop_rectangle)
if end is not None:
pulumi.set(__self__, "end", end)
if fade_in_duration is not None:
pulumi.set(__self__, "fade_in_duration", fade_in_duration)
if fade_out_duration is not None:
pulumi.set(__self__, "fade_out_duration", fade_out_duration)
if opacity is not None:
pulumi.set(__self__, "opacity", opacity)
if position is not None:
pulumi.set(__self__, "position", position)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter(name="inputLabel")
def input_label(self) -> pulumi.Input[str]:
"""
The label of the job input which is to be used as an overlay. The Input must specify exactly one file. You can specify an image file in JPG or PNG formats, or an audio file (such as a WAV, MP3, WMA or M4A file), or a video file. See https://aka.ms/mesformats for the complete list of supported audio and video file formats.
"""
return pulumi.get(self, "input_label")
@input_label.setter
def input_label(self, value: pulumi.Input[str]):
pulumi.set(self, "input_label", value)
@property
@pulumi.getter(name="odataType")
def odata_type(self) -> pulumi.Input[str]:
"""
The discriminator for derived types.
Expected value is '#Microsoft.Media.VideoOverlay'.
"""
return pulumi.get(self, "odata_type")
@odata_type.setter
def odata_type(self, value: pulumi.Input[str]):
pulumi.set(self, "odata_type", value)
@property
@pulumi.getter(name="audioGainLevel")
def audio_gain_level(self) -> Optional[pulumi.Input[float]]:
"""
The gain level of audio in the overlay. The value should be in the range [0, 1.0]. The default is 1.0.
"""
return pulumi.get(self, "audio_gain_level")
@audio_gain_level.setter
def audio_gain_level(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "audio_gain_level", value)
@property
@pulumi.getter(name="cropRectangle")
def crop_rectangle(self) -> Optional[pulumi.Input['RectangleArgs']]:
"""
An optional rectangular window used to crop the overlay image or video.
"""
return pulumi.get(self, "crop_rectangle")
@crop_rectangle.setter
def crop_rectangle(self, value: Optional[pulumi.Input['RectangleArgs']]):
pulumi.set(self, "crop_rectangle", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input[str]]:
"""
The position in the input video at which the overlay ends. The value should be in ISO 8601 duration format. For example, PT30S to end the overlay 30 seconds into the input video. If not specified, the overlay will be applied until the end of the input video if inputLoop is true. Else, if inputLoop is false, then the overlay will last as long as the duration of the overlay media.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "end", value)
@property
@pulumi.getter(name="fadeInDuration")
def fade_in_duration(self) -> Optional[pulumi.Input[str]]:
"""
The duration over which the overlay fades in onto the input video. The value should be in ISO 8601 duration format. If not specified, the default behavior is to have no fade in (same as PT0S).
"""
return pulumi.get(self, "fade_in_duration")
@fade_in_duration.setter
def fade_in_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fade_in_duration", value)
@property
@pulumi.getter(name="fadeOutDuration")
def fade_out_duration(self) -> Optional[pulumi.Input[str]]:
"""
The duration over which the overlay fades out of the input video. The value should be in ISO 8601 duration format. If not specified, the default behavior is to have no fade out (same as PT0S).
"""
return pulumi.get(self, "fade_out_duration")
@fade_out_duration.setter
def fade_out_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fade_out_duration", value)
@property
@pulumi.getter
def opacity(self) -> Optional[pulumi.Input[float]]:
"""
The opacity of the overlay. This is a value in the range [0, 1.0]. Default is 1.0, which means the overlay is opaque.
"""
return pulumi.get(self, "opacity")
@opacity.setter
def opacity(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "opacity", value)
@property
@pulumi.getter
def position(self) -> Optional[pulumi.Input['RectangleArgs']]:
"""
The location in the input video where the overlay is applied.
"""
return pulumi.get(self, "position")
@position.setter
def position(self, value: Optional[pulumi.Input['RectangleArgs']]):
pulumi.set(self, "position", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input[str]]:
"""
The start position, with reference to the input video, at which the overlay starts. The value should be in ISO 8601 duration format. For example, PT05S to start the overlay 5 seconds into the input video. If not specified, the overlay starts from the beginning of the input video.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "start", value)
| 47.713562 | 877 | 0.682518 | 32,893 | 273,017 | 5.519503 | 0.032743 | 0.08755 | 0.05182 | 0.032233 | 0.882459 | 0.818026 | 0.766305 | 0.718363 | 0.688251 | 0.660011 | 0 | 0.004469 | 0.218177 | 273,017 | 5,721 | 878 | 47.721902 | 0.846091 | 0.361725 | 0 | 0.6006 | 1 | 0 | 0.1655 | 0.082833 | 0 | 0 | 0 | 0 | 0 | 1 | 0.211394 | false | 0.004798 | 0.001799 | 0 | 0.332234 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d9e22d97b796d45a17b36afafec78186764342f8 | 607 | py | Python | 0x08-python-more_classes/8-main.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | ["MIT"] | null | null | null | 0x08-python-more_classes/8-main.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | ["MIT"] | null | null | null | 0x08-python-more_classes/8-main.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | ["MIT"] | null | null | null |
#!/usr/bin/python3
Rectangle = __import__('8-rectangle').Rectangle
my_rectangle_1 = Rectangle(8, 4)
my_rectangle_2 = Rectangle(2, 3)
if my_rectangle_1 is Rectangle.bigger_or_equal(my_rectangle_1, my_rectangle_2):
print("my_rectangle_1 is bigger or equal to my_rectangle_2")
else:
print("my_rectangle_2 is bigger than my_rectangle_1")
my_rectangle_2.width = 10
my_rectangle_2.height = 5
if my_rectangle_1 is Rectangle.bigger_or_equal(my_rectangle_1, my_rectangle_2):
print("my_rectangle_1 is bigger or equal to my_rectangle_2")
else:
print("my_rectangle_2 is bigger than my_rectangle_1")
| 31.947368 | 79 | 0.792422 | 106 | 607 | 4.122642 | 0.226415 | 0.453089 | 0.24714 | 0.128146 | 0.727689 | 0.727689 | 0.700229 | 0.700229 | 0.700229 | 0.700229 | 0 | 0.050657 | 0.121911 | 607 | 18 | 80 | 33.722222 | 0.769231 | 0.028007 | 0 | 0.615385 | 0 | 0 | 0.341256 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.076923 | 0.307692 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d9f95a2c795b883f265e5fe8b28b8314dd57c94a | 17,446 | py | Python | test_fetchinterpreter.py | buffis/fetch | 2ef97c397dc189800460b2dd419a4af2c5bba374 | ["MIT"] | 1 | 2015-06-03T18:05:14.000Z | 2015-06-03T18:05:14.000Z | test_fetchinterpreter.py | buffis/fetch | 2ef97c397dc189800460b2dd419a4af2c5bba374 | ["MIT"] | null | null | null | test_fetchinterpreter.py | buffis/fetch | 2ef97c397dc189800460b2dd419a4af2c5bba374 | ["MIT"] | null | null | null |
import fetchinterpreter
import unittest
from parseractions import *
from BeautifulSoup import BeautifulSoup
TEST_URL = "http://example.com/cool_api"
class TestFunctions(unittest.TestCase):
def setUp(self):
print "Testing: ", self
fetchinterpreter.VARS = {}
def test_fetchaction_get(self):
name = "test"
action = FetchAction(name, "GET", TEST_URL)
fetchinterpreter.handle_line(action)
self.assertEquals("GET", fetchinterpreter.VARS[name].method)
self.assertEquals(TEST_URL, fetchinterpreter.VARS[name].url)
def test_fetchaction_post(self):
name = "test"
action = FetchAction(name, "POST", TEST_URL)
fetchinterpreter.handle_line(action)
self.assertEquals("POST", fetchinterpreter.VARS[name].method)
self.assertEquals(TEST_URL, fetchinterpreter.VARS[name].url)
def test_filter_starts(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("starts", "hello"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(2, len(output))
self.assertTrue("hello world" in output)
self.assertTrue("hello hello" in output)
def test_filter_ends(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("ends", "hello"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(2, len(output))
self.assertTrue("world hello" in output)
self.assertTrue("hello hello" in output)
def test_filter_contains(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("contains","d he"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(1, len(output))
self.assertTrue("world hello" in output)
def test_filter_matches(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"HELLO100B",
"YO100A",
"hello100b",
"foobar"
])
action = CoarseFilterAction("x", BasicFilterExpression("matches","[A-Z]+100[ABC]"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(2, len(output))
self.assertTrue("HELLO100B" in output)
self.assertTrue("YO100A" in output)
def test_filter_length_gt(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("length",">5"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(3, len(output))
self.assertTrue("hello world" in output)
self.assertTrue("world hello" in output)
self.assertTrue("hello hello" in output)
def test_filter_length_lt(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("length","<6"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(1, len(output))
self.assertTrue("world" in output)
def test_filter_length_eq(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x", BasicFilterExpression("length","=5"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(1, len(output))
self.assertTrue("world" in output)
# Fine filters below.
def test_filter_after(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = FineFilterAction("x", BasicFilterExpression("after","hell"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(4, len(output))
self.assertEquals(["o world", "o", "o hello", ""], output)
def test_filter_before(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = FineFilterAction("x", BasicFilterExpression("before","orl"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(4, len(output))
self.assertEquals(["hello w", "w", "hello hello", "w"], output)
def test_filter_afterpos(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = FineFilterAction("x", BasicFilterExpression("afterpos","3"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(4, len(output))
self.assertEquals(["lo world", "ld hello", "lo hello", "ld"], output)
def test_filter_beforepos(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = FineFilterAction("x", BasicFilterExpression("beforepos","3"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(["hel", "wor", "hel", "wor"], output)
def test_filter_exclude(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world",
"world world"
])
action = FineFilterAction("x", BasicFilterExpression("exclude","world"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(["hello ", " hello", "hello hello", "", " "], output)
def test_filter_striptags_img(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello <p>foobar</p> world",
"hello <img src='pic.jpg'/> world",
"hello <img src='pic.jpg'/><img src='pic.jpg'/> world",
"hello <p>hello</p><img src='pic.jpg'/>world",
])
action = FineFilterAction("x", BasicFilterExpression("striptags","img"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals([
"hello <p>foobar</p> world",
"hello world",
"hello world",
"hello <p>hello</p>world"], output)
def test_filter_striptags_p_and_img(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello <p>foobar</p> world",
"hello <img src='pic.jpg'/> world",
"hello <img src='pic.jpg'/><img src='pic.jpg'/> world",
"hello <p>hello</p><img src='pic.jpg'/>world",
])
action = FineFilterAction("x", BasicFilterExpression("striptags","p,img"), "y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals([
"hello world",
"hello world",
"hello world",
"hello world"], output)
def test_filter_html_children(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<html><head><title>Page title</title></head></html>")])
action = CoarseFilterAction("x", BasicFilterExpression("children","html"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("rawtext",""), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(
["<html><head><title>Page title</title></head></html>"], output)
def test_filter_html_children_with_class(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<p class='a'>p1</p><p class='a'>p2</p><p>p3</p>")])
action = CoarseFilterAction("x", BasicFilterExpression("children","p.a"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("rawtext",""), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(
['<p class="a">p1</p>', '<p class="a">p2</p>'], output)
def test_filter_html_findall_children_with_class_too_deep(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<html><body><p class='a'>p1</p><p class='b'>p2</p><p>p3</p></body></html>")])
action = CoarseFilterAction("x", BasicFilterExpression("children","p.a"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("rawtext",""), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals([], output)
def test_filter_html_findall_tag(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<html><body><p>p1</p><p>p2</p><p>p3</p></body></html>")])
action = CoarseFilterAction("x", BasicFilterExpression("findall","p"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("rawtext",""), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(
['<p>p1</p>', '<p>p2</p>', '<p>p3</p>'], output)
def test_filter_html_findall_tag_with_class(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<html><body><p class='a'>p1</p><p class='b'>p2</p><p>p3</p></body></html>")])
action = CoarseFilterAction("x", BasicFilterExpression("findall","p.a"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("rawtext",""), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(
['<p class="a">p1</p>'], output)
def test_filter_html_attr(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.HtmlWrapper(
[BeautifulSoup("<html><body><img src='things'/><img srcs='stuff'/></body></html>")])
action = CoarseFilterAction("x", BasicFilterExpression("findall","img"), "y")
fetchinterpreter.handle_line(action)
action = FineFilterAction("x", BasicFilterExpression("attr","src"), "x")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(
["things", ""], output)
def test_neg_coarse_filter(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"world hello",
"hello hello",
"world"
])
action = CoarseFilterAction("x",
NegFilterExpression(BasicFilterExpression("starts","hello")),
"y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(["world hello", "world"], output)
def test_combined_filter_or(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"goodbye world",
"hello hello",
"world hello",
"world goodbye"
])
action = CoarseFilterAction("x",
CombinedFilterExpression(
BasicFilterExpression("starts","hello"),
BasicFilterExpression("starts","goodbye"),
"|"
),
"y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals([
"hello world",
"goodbye world",
"hello hello"
], output)
def test_combined_filter_and(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"goodbye world",
"hello hello",
"world hello",
"hello goodbye"
])
action = CoarseFilterAction("x",
CombinedFilterExpression(
BasicFilterExpression("starts","hello"),
BasicFilterExpression("ends","goodbye"),
"&"
),
"y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
self.assertEquals(["hello goodbye"], output)
def test_combined_filter_and_noresult(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper([
"hello world",
"goodbye world",
"hello hello",
"world hello",
"world goodbye"
])
action = CoarseFilterAction("x",
CombinedFilterExpression(
BasicFilterExpression("starts","hello"),
BasicFilterExpression("starts","goodbye"),
"&"
),
"y")
fetchinterpreter.handle_line(action)
output = fetchinterpreter.VARS["x"].output()
        self.assertEqual([], output)
def test_assignment(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper(["hello world"])
action = OutputAssignment("x", "y")
fetchinterpreter.handle_line(action)
self.assertTrue("x" in fetchinterpreter.VARS)
self.assertEquals(["hello world"], fetchinterpreter.VARS["x"].output())
def test_assignment_plus(self):
fetchinterpreter.VARS["y1"] = fetchinterpreter.TextWrapper(["hello world"])
fetchinterpreter.VARS["y2"] = fetchinterpreter.TextWrapper(["goodbye world"])
action = OutputAssignment("x", ListPlus("y1", "y2"))
fetchinterpreter.handle_line(action)
self.assertTrue("x" in fetchinterpreter.VARS)
self.assertEquals(["hello world", "goodbye world"], fetchinterpreter.VARS["x"].output())
def test_assignment_valueat(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper(["hello", "world", "goodbye"])
action = OutputAssignment("x", ListAt("y", "1"))
fetchinterpreter.handle_line(action)
self.assertTrue("x" in fetchinterpreter.VARS)
self.assertEquals(["world"], fetchinterpreter.VARS["x"].output())
def test_assignment_valueat_notinrange(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper(["hello", "world", "goodbye"])
action = OutputAssignment("x", ListAt("y", "10"))
try:
fetchinterpreter.handle_line(action)
self.fail("Expected failure")
except fetchinterpreter.InterpreterException:
pass # Expected
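    # A more direct form of the try/fail/except pattern above, using
    # unittest's assertRaises context manager (available since Python 2.7);
    # shown here only as a hedged alternative, not a change to the tests:
    #
    #     with self.assertRaises(fetchinterpreter.InterpreterException):
    #         fetchinterpreter.handle_line(action)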
def test_assignment_dict(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper(["hello world"])
action = OutputAssignment("x", {"z": "y"})
fetchinterpreter.handle_line(action)
self.assertTrue("x" in fetchinterpreter.VARS)
self.assertTrue("z" in fetchinterpreter.VARS["x"])
self.assertEquals(["hello world"], fetchinterpreter.VARS["x"]["z"].output())
def test_assignment_dict_lvalue(self):
fetchinterpreter.VARS["y"] = fetchinterpreter.TextWrapper(["hello world"])
fetchinterpreter.VARS["x"] = {}
action = OutputAssignment(DictAt("x","z"), "y")
fetchinterpreter.handle_line(action)
self.assertEquals(["hello world"], fetchinterpreter.VARS["x"]["z"].output())
if __name__ == '__main__':
unittest.main()
avg_line_length 42.655257 | max_line_length 105 | alphanum_fraction 0.585062 | (remaining per-file quality-signal columns, unlabeled numeric values, omitted)

hexsha 8a257d9108f5917a386b06c4b116b9fb3aecf637 | size 3,681 | ext py | lang Python
path Lessons/source/queue.py | repo ericanaglik/cs13 | head 6dc2dd41e0b82a43999145b226509d8fc0adb366 | licenses ["MIT"]
stars: n/a | issues: 8 (2019-04-26T06:29:56.000Z to 2019-08-17T01:48:07.000Z) | forks: n/a
#!python
from linkedlist import LinkedList
# Implement LinkedQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class LinkedQueue(object):
def __init__(self, iterable=None):
"""Initialize this queue and enqueue the given items, if any."""
# Initialize a new linked list to store the items
self.list = LinkedList()
if iterable is not None:
for item in iterable:
self.enqueue(item)
def __repr__(self):
"""Return a string representation of this queue."""
return 'Queue({} items, front={})'.format(self.length(), self.front())
def is_empty(self):
"""Return True if this queue is empty, or False otherwise."""
return self.list.is_empty()
def length(self):
"""Return the number of items in this queue."""
return self.list.length()
    def enqueue(self, item):
        """Insert the given item at the back of this queue.
        Running time: O(1) because we always append at the tail and only
        modify one link."""
        self.list.append(item)
def front(self):
"""Return the item at the front of this queue without removing it,
or None if this queue is empty."""
if self.is_empty():
return None
else:
return self.list.head.data
    def dequeue(self):
        """Remove and return the item at the front of this queue,
        or raise ValueError if this queue is empty.
        Running time: O(1) because the removed item is always the head,
        so delete() finds it immediately."""
        if self.is_empty():
            raise ValueError("queue is empty!")
        else:
            item = self.front()
            self.list.delete(item)
            return item
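# A minimal usage sketch for LinkedQueue. It assumes only the LinkedList
# behavior already relied on above (append, delete, is_empty, a head node
# with .data); nothing here is part of the lesson's test suite.
#
#     q = LinkedQueue(['a', 'b'])
#     q.enqueue('c')
#     assert q.front() == 'a'      # oldest item stays at the front
#     assert q.dequeue() == 'a'    # FIFO: first enqueued, first dequeued
#     assert q.length() == 2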
# Implement ArrayQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class ArrayQueue(object):
def __init__(self, iterable=None):
"""Initialize this queue and enqueue the given items, if any."""
# Initialize a new list (dynamic array) to store the items
self.list = list()
if iterable is not None:
for item in iterable:
self.enqueue(item)
def __repr__(self):
"""Return a string representation of this queue."""
return 'Queue({} items, front={})'.format(self.length(), self.front())
def is_empty(self):
"""Return True if this queue is empty, or False otherwise."""
return len(self.list) == 0
def length(self):
"""Return the number of items in this queue."""
return len(self.list)
    def enqueue(self, item):
        """Insert the given item at the back of this queue.
        Running time: amortized O(1) because list.append only occasionally
        has to resize the underlying array."""
        self.list.append(item)
def front(self):
"""Return the item at the front of this queue without removing it,
or None if this queue is empty."""
if self.is_empty():
return None
else:
return self.list[0]
    def dequeue(self):
        """Remove and return the item at the front of this queue,
        or raise ValueError if this queue is empty.
        Running time: O(n) because pop(0) shifts the index of every
        remaining item in the list."""
        if self.is_empty():
            raise ValueError("queue is empty!")
        else:
            return self.list.pop(0)
# Implement LinkedQueue and ArrayQueue above, then change the assignment below
# to use each of your Queue implementations to verify they each pass all tests
Queue = ArrayQueue
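# ArrayQueue.dequeue relies on list.pop(0), which shifts every remaining
# element and is therefore O(n). A sketch of an O(1)-dequeue variant built
# on collections.deque (hypothetical DequeQueue, not required by the tests):
#
#     from collections import deque
#
#     class DequeQueue(object):
#         def __init__(self, iterable=None):
#             self.items = deque(iterable or [])
#
#         def enqueue(self, item):
#             self.items.append(item)      # O(1) append at the back
#
#         def dequeue(self):
#             if not self.items:
#                 raise ValueError("queue is empty!")
#             return self.items.popleft()  # O(1) removal from the front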
avg_line_length 35.057143 | max_line_length 106 | alphanum_fraction 0.625645 | (remaining per-file quality-signal columns, unlabeled numeric values, omitted)

hexsha 8a6ec7955077249cc985dc817997675359089741 | size 7,892 | ext py | lang Python
path datasets/dataset.py | repo JacobGump/TorchSSL | head bb6978d6e67940eb3d28d99c45d0f2960355b972 | licenses ["MIT"]
stars: n/a | issues: n/a | forks: n/a
from torchvision import transforms
from torch.utils.data import Dataset
from .data_utils import get_onehot
from .augmentation.randaugment import RandAugment
import torchvision
from PIL import Image
import numpy as np
import copy
class BasicDataset(Dataset):
"""
BasicDataset returns a pair of image and labels (targets).
If targets are not given, BasicDataset returns None as the label.
This class supports strong augmentation for Fixmatch,
and return both weakly and strongly augmented images.
"""
def __init__(self,
alg,
data,
targets=None,
num_classes=None,
transform=None,
is_ulb=False,
strong_transform=None,
onehot=False,
*args, **kwargs):
"""
Args
data: x_data
targets: y_data (if not exist, None)
num_classes: number of label classes
transform: basic transformation of data
use_strong_transform: If True, this dataset returns both weakly and strongly augmented images.
strong_transform: list of transformation functions for strong augmentation
onehot: If True, label is converted into onehot vector.
"""
super(BasicDataset, self).__init__()
self.alg = alg
self.data = data
self.targets = targets
self.num_classes = num_classes
self.is_ulb = is_ulb
self.onehot = onehot
self.transform = transform
if self.is_ulb:
if strong_transform is None:
self.strong_transform = copy.deepcopy(transform)
self.strong_transform.transforms.insert(0, RandAugment(3, 5))
else:
self.strong_transform = strong_transform
def __getitem__(self, idx):
"""
If strong augmentation is not used,
return weak_augment_image, target
else:
return weak_augment_image, strong_augment_image, target
"""
# set idx-th target
if self.targets is None:
target = None
else:
target_ = self.targets[idx]
target = target_ if not self.onehot else get_onehot(self.num_classes, target_)
# set augmented images
img = self.data[idx]
if self.transform is None:
return transforms.ToTensor()(img), target
else:
if isinstance(img, np.ndarray):
img = Image.fromarray(img)
img_w = self.transform(img)
if not self.is_ulb:
return idx, img_w, target
else:
if self.alg == 'fixmatch':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'flexmatch':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'pimodel':
return idx, img_w, self.transform(img)
elif self.alg == 'pseudolabel':
return idx, img_w
elif self.alg == 'vat':
return idx, img_w
elif self.alg == 'meanteacher':
return idx, img_w, self.transform(img)
elif self.alg == 'uda':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'mixmatch':
return idx, img_w, self.transform(img)
elif self.alg == 'remixmatch':
rotate_v_list = [0, 90, 180, 270]
rotate_v1 = np.random.choice(rotate_v_list, 1).item()
img_s1 = self.strong_transform(img)
img_s1_rot = torchvision.transforms.functional.rotate(img_s1, rotate_v1)
img_s2 = self.strong_transform(img)
return idx, img_w, img_s1, img_s2, img_s1_rot, rotate_v_list.index(rotate_v1)
elif self.alg == 'fullysupervised':
return idx
def __len__(self):
return len(self.data)
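# A minimal usage sketch for BasicDataset. The CIFAR-10 download, the weak
# transform, and alg='fixmatch' below are illustrative assumptions, not part
# of this module:
#
#     import torchvision.transforms as T
#     from torchvision.datasets import CIFAR10
#
#     cifar = CIFAR10(root='./data', train=True, download=True)
#     weak = T.Compose([T.RandomHorizontalFlip(), T.ToTensor()])
#     ulb = BasicDataset('fixmatch', cifar.data, targets=None,
#                        num_classes=10, transform=weak, is_ulb=True)
#     idx, img_w, img_s = ulb[0]   # weakly and strongly augmented views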
class CustomCifar10(Dataset):
"""
BasicDataset returns a pair of image and labels (targets).
If targets are not given, BasicDataset returns None as the label.
This class supports strong augmentation for Fixmatch,
and return both weakly and strongly augmented images.
"""
def __init__(self,
alg,
data,
targets=None,
num_classes=None,
transform=None,
is_ulb=False,
strong_transform=None,
onehot=False,
*args, **kwargs):
"""
Args
data: x_data
targets: y_data (if not exist, None)
num_classes: number of label classes
transform: basic transformation of data
use_strong_transform: If True, this dataset returns both weakly and strongly augmented images.
strong_transform: list of transformation functions for strong augmentation
onehot: If True, label is converted into onehot vector.
"""
        super(CustomCifar10, self).__init__()
self.alg = alg
self.data = data
self.targets = targets
self.num_classes = num_classes
self.is_ulb = is_ulb
self.onehot = onehot
self.transform = transform
if self.is_ulb:
if strong_transform is None:
self.strong_transform = copy.deepcopy(transform)
self.strong_transform.transforms.insert(0, RandAugment(3, 5))
else:
self.strong_transform = strong_transform
def __getitem__(self, idx):
"""
If strong augmentation is not used,
return weak_augment_image, target
else:
return weak_augment_image, strong_augment_image, target
"""
# set idx-th target
if self.targets is None:
target = None
else:
target_ = self.targets[idx]
target = target_ if not self.onehot else get_onehot(self.num_classes, target_)
# set augmented images
img = self.data[idx]
if self.transform is None:
return transforms.ToTensor()(img), target
else:
img_w = self.transform(img)
if not self.is_ulb:
return idx, img_w, target
else:
if self.alg == 'fixmatch':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'flexmatch':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'pimodel':
return idx, img_w, self.transform(img)
elif self.alg == 'pseudolabel':
return idx, img_w
elif self.alg == 'vat':
return idx, img_w
elif self.alg == 'meanteacher':
return idx, img_w, self.transform(img)
elif self.alg == 'uda':
return idx, img_w, self.strong_transform(img)
elif self.alg == 'mixmatch':
return idx, img_w, self.transform(img)
elif self.alg == 'remixmatch':
rotate_v_list = [0, 90, 180, 270]
rotate_v1 = np.random.choice(rotate_v_list, 1).item()
img_s1 = self.strong_transform(img)
img_s1_rot = torchvision.transforms.functional.rotate(img_s1, rotate_v1)
img_s2 = self.strong_transform(img)
return idx, img_w, img_s1, img_s2, img_s1_rot, rotate_v_list.index(rotate_v1)
elif self.alg == 'fullysupervised':
return idx
def __len__(self):
return len(self.data)
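# Wrapping either dataset in a DataLoader follows the standard PyTorch
# pattern; the dataset variable, batch size, and worker count below are
# illustrative assumptions:
#
#     from torch.utils.data import DataLoader
#
#     loader = DataLoader(dataset, batch_size=64, shuffle=True, num_workers=4)
#     for idx, img_w, img_s in loader:
#         ...  # feed the weak/strong views to the SSL training step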
avg_line_length 37.226415 | max_line_length 106 | alphanum_fraction 0.553599 | (remaining per-file quality-signal columns, unlabeled numeric values, omitted)

hexsha 76dbb23c2fc5e8e5832157993f694ee5148c07b4 | size 148,698 | ext py | lang Python
path lib/installed_clients/SetAPIServiceClient.py | repo kbaseapps/DifferentialExpressionUtils | head 2172c1d7afd2da00d0817161345920665a013ebb | licenses ["MIT"]
stars: n/a | issues: 3 (2017-08-29T17:13:43.000Z to 2018-06-05T14:51:01.000Z) | forks: 7 (2017-07-13T16:45:48.000Z to 2018-09-11T14:35:32.000Z)
# -*- coding: utf-8 -*-
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
from __future__ import print_function
# The following is a hack to get the baseclient to import whether or not
# we're in a package. This makes pep8 unhappy, hence the annotations.
try:
# baseclient and this client are in a package
from .baseclient import BaseClient as _BaseClient # @UnusedImport
except ImportError:
# no they aren't
from baseclient import BaseClient as _BaseClient # @Reimport
class SetAPI(object):
def __init__(
self, url=None, timeout=30 * 60, user_id=None,
password=None, token=None, ignore_authrc=False,
trust_all_ssl_certificates=False,
auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login',
service_ver='release'):
if url is None:
url = 'https://kbase.us/services/service_wizard'
self._service_ver = service_ver
self._client = _BaseClient(
url, timeout=timeout, user_id=user_id, password=password,
token=token, ignore_authrc=ignore_authrc,
trust_all_ssl_certificates=trust_all_ssl_certificates,
auth_svc=auth_svc,
lookup_url=True)
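    # A minimal construction sketch; the token below is a placeholder, and
    # the URL defaults to the KBase service wizard as set in __init__:
    #
    #     client = SetAPI(token='<auth token>')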
def get_differential_expression_matrix_set_v1(self, params, context=None):
"""
:param params: instance of type
"GetDifferentialExpressionMatrixSetV1Params" (ref - workspace
reference to DifferentialExpressionMatrixSet object.
include_item_info - 1 or 0, if 1 additionally provides workspace
info (with metadata) for each DifferentialExpressionMatrix object
in the Set include_set_item_ref_paths - 1 or 0, if 1, additionally
provides ref_path for each item in the set. The ref_path returned
for each item is either ref_path_to_set;item_ref (if
ref_path_to_set is given) or set_ref;item_ref (if ref_path_to_set
is not given)) -> structure: parameter "ref" of String, parameter
"include_item_info" of type "boolean" (A boolean. 0 = false, 1 =
true.), parameter "include_set_item_ref_paths" of type "boolean"
(A boolean. 0 = false, 1 = true.), parameter "ref_path_to_set" of
list of String
:returns: instance of type
"GetDifferentialExpressionMatrixSetV1Result" -> structure:
parameter "data" of type "DifferentialExpressionMatrixSet" (When
building a DifferentialExpressionMatrixSet, all
DifferentialExpressionMatrices must be built against the same
genome. This is not part of the object type, but enforced during a
call to save_differential_expression_matrix_set_v1. @meta ws
description as description @meta ws length(items) as item_count)
-> structure: parameter "description" of String, parameter "items"
of list of type "DifferentialExpressionMatrixSetItem" (When saving
a DifferentialExpressionMatrixSet, only 'ref' is required. You
should never set 'info'. 'info' is provided optionally when
fetching the DifferentialExpressionMatrixSet. ref_path is
optionally returned by get_differential_expression_matrix_set_v1()
when its input parameter 'include_set_item_ref_paths' is set to
1.) -> structure: parameter "ref" of type "ws_diffexpmatrix_id"
(The workspace id for a FeatureSet data object. @id ws
KBaseFeatureValues.DifferentialExpressionMatrix;), parameter
"ref_path" of type "ws_diffexpmatrix_id" (The workspace id for a
FeatureSet data object. @id ws
KBaseFeatureValues.DifferentialExpressionMatrix;), parameter
"label" of String, parameter "info" of type "object_info"
(Information about an object, including user provided metadata.
obj_id objid - the numerical id of the object. obj_name name - the
name of the object. type_string type - the type of the object.
timestamp save_date - the save date of the object. obj_ver ver -
the version of the object. username saved_by - the user that saved
or copied the object. ws_id wsid - the workspace containing the
object. ws_name workspace - the workspace containing the object.
string chsum - the md5 checksum of the object. int size - the size
of the object in bytes. usermeta meta - arbitrary user-supplied
metadata about the object.) -> tuple of size 11: parameter "objid"
of type "obj_id" (The unique, permanent numerical ID of an
object.), parameter "name" of type "obj_name" (A string used as a
name for an object. Any string consisting of alphanumeric
characters and the characters |._- that is not an integer is
acceptable.), parameter "type" of type "type_string" (A type
string. Specifies the type and its version in a single string in
the format [module].[typename]-[major].[minor]: module - a string.
The module name of the typespec containing the type. typename - a
string. The name of the type as assigned by the typedef statement.
major - an integer. The major version of the type. A change in the
major version implies the type has changed in a non-backwards
compatible way. minor - an integer. The minor version of the type.
A change in the minor version implies that the type has changed in
a way that is backwards compatible with previous type definitions.
In many cases, the major and minor versions are optional, and if
not provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
"""
return self._client.call_method(
'SetAPI.get_differential_expression_matrix_set_v1',
[params], self._service_ver, context)
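    # A hedged call sketch for the method above; the workspace reference
    # '123/4/5' is a placeholder:
    #
    #     result = client.get_differential_expression_matrix_set_v1({
    #         'ref': '123/4/5',
    #         'include_item_info': 1,
    #     })
    #     items = result['data']['items']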
def save_differential_expression_matrix_set_v1(self, params, context=None):
"""
:param params: instance of type
"SaveDifferentialExpressionMatrixSetV1Params" (workspace_name or
workspace_id - alternative options defining target workspace,
output_object_name - workspace object name (this parameter is used
together with one of workspace params from above)) -> structure:
parameter "workspace" of String, parameter "output_object_name" of
String, parameter "data" of type "DifferentialExpressionMatrixSet"
(When building a DifferentialExpressionMatrixSet, all
DifferentialExpressionMatrices must be built against the same
genome. This is not part of the object type, but enforced during a
call to save_differential_expression_matrix_set_v1. @meta ws
description as description @meta ws length(items) as item_count)
-> structure: parameter "description" of String, parameter "items"
of list of type "DifferentialExpressionMatrixSetItem" (When saving
a DifferentialExpressionMatrixSet, only 'ref' is required. You
should never set 'info'. 'info' is provided optionally when
fetching the DifferentialExpressionMatrixSet. ref_path is
optionally returned by get_differential_expression_matrix_set_v1()
when its input parameter 'include_set_item_ref_paths' is set to
1.) -> structure: parameter "ref" of type "ws_diffexpmatrix_id"
(The workspace id for a FeatureSet data object. @id ws
KBaseFeatureValues.DifferentialExpressionMatrix;), parameter
"ref_path" of type "ws_diffexpmatrix_id" (The workspace id for a
FeatureSet data object. @id ws
KBaseFeatureValues.DifferentialExpressionMatrix;), parameter
"label" of String, parameter "info" of type "object_info"
(Information about an object, including user provided metadata.
obj_id objid - the numerical id of the object. obj_name name - the
name of the object. type_string type - the type of the object.
timestamp save_date - the save date of the object. obj_ver ver -
the version of the object. username saved_by - the user that saved
or copied the object. ws_id wsid - the workspace containing the
object. ws_name workspace - the workspace containing the object.
string chsum - the md5 checksum of the object. int size - the size
of the object in bytes. usermeta meta - arbitrary user-supplied
metadata about the object.) -> tuple of size 11: parameter "objid"
of type "obj_id" (The unique, permanent numerical ID of an
object.), parameter "name" of type "obj_name" (A string used as a
name for an object. Any string consisting of alphanumeric
characters and the characters |._- that is not an integer is
acceptable.), parameter "type" of type "type_string" (A type
string. Specifies the type and its version in a single string in
the format [module].[typename]-[major].[minor]: module - a string.
The module name of the typespec containing the type. typename - a
string. The name of the type as assigned by the typedef statement.
major - an integer. The major version of the type. A change in the
major version implies the type has changed in a non-backwards
compatible way. minor - an integer. The minor version of the type.
A change in the minor version implies that the type has changed in
a way that is backwards compatible with previous type definitions.
In many cases, the major and minor versions are optional, and if
not provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
:returns: instance of type
"SaveDifferentialExpressionMatrixSetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
"""
return self._client.call_method(
'SetAPI.save_differential_expression_matrix_set_v1',
[params], self._service_ver, context)
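    # A hedged call sketch mirroring the params documented above; the
    # workspace name, object name, and item ref are placeholders:
    #
    #     result = client.save_differential_expression_matrix_set_v1({
    #         'workspace': 'my_workspace',
    #         'output_object_name': 'my_dem_set',
    #         'data': {'description': 'example',
    #                  'items': [{'ref': '123/4/5', 'label': 'condition_1'}]},
    #     })
    #     set_ref = result['set_ref']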
def get_feature_set_set_v1(self, params, context=None):
"""
:param params: instance of type "GetFeatureSetSetV1Params" (ref -
workspace reference to FeatureSetSet object. include_item_info - 1
or 0, if 1 additionally provides workspace info (with metadata)
for each FeatureSet object in the Set include_set_item_ref_paths -
1 or 0, if 1, additionally provides ref_path for each item in the
set. The ref_path returned for each item is either
ref_path_to_set;item_ref (if ref_path_to_set is given) or
set_ref;item_ref (if ref_path_to_set is not given)) -> structure:
parameter "ref" of String, parameter "include_item_info" of type
"boolean" (A boolean. 0 = false, 1 = true.), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "ref_path_to_set" of list of String
:returns: instance of type "GetFeatureSetSetV1Result" -> structure:
parameter "data" of type "FeatureSetSet" (When building a
FeatureSetSet, all FeatureSets must be aligned against the same
genome. This is not part of the object type, but enforced during a
call to save_feature_set_set_v1. @meta ws description as
description @meta ws length(items) as item_count) -> structure:
parameter "description" of String, parameter "items" of list of
type "FeatureSetSetItem" (When saving a FeatureSetSet, only 'ref'
is required. You should never set 'info'. 'info' is provided
optionally when fetching the FeatureSetSet. ref_path is optionally
returned by get_feature_set_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_feature_set_id" (The workspace id for a
FeatureSet data object. @id ws KBaseCollections.FeatureSet),
parameter "ref_path" of type "ws_feature_set_id" (The workspace id
for a FeatureSet data object. @id ws KBaseCollections.FeatureSet),
parameter "label" of String, parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String, parameter "info" of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
return self._client.call_method(
'SetAPI.get_feature_set_set_v1',
[params], self._service_ver, context)
def save_feature_set_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveFeatureSetSetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of workspace params from
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"FeatureSetSet" (When building a FeatureSetSet, all FeatureSets
must be aligned against the same genome. This is not part of the
object type, but enforced during a call to
save_feature_set_set_v1. @meta ws description as description @meta
ws length(items) as item_count) -> structure: parameter
"description" of String, parameter "items" of list of type
"FeatureSetSetItem" (When saving a FeatureSetSet, only 'ref' is
required. You should never set 'info'. 'info' is provided
optionally when fetching the FeatureSetSet. ref_path is optionally
returned by get_feature_set_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_feature_set_id" (The workspace id for a
FeatureSet data object. @id ws KBaseCollections.FeatureSet),
parameter "ref_path" of type "ws_feature_set_id" (The workspace id
for a FeatureSet data object. @id ws KBaseCollections.FeatureSet),
parameter "label" of String, parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
:returns: instance of type "SaveFeatureSetSetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
"""
return self._client.call_method(
'SetAPI.save_feature_set_set_v1',
[params], self._service_ver, context)
def get_expression_set_v1(self, params, context=None):
"""
:param params: instance of type "GetExpressionSetV1Params" (ref -
workspace reference to ExpressionSet object. include_item_info - 1
or 0, if 1 additionally provides workspace info (with metadata)
for each Expression object in the Set include_set_item_ref_paths -
1 or 0, if 1, additionally provides ref_path for each item in the
set. The ref_path returned for each item is either
ref_path_to_set;item_ref (if ref_path_to_set is given) or
set_ref;item_ref (if ref_path_to_set is not given)) -> structure:
parameter "ref" of String, parameter "include_item_info" of type
"boolean" (A boolean. 0 = false, 1 = true.), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "ref_path_to_set" of list of String
:returns: instance of type "GetExpressionSetV1Result" -> structure:
parameter "data" of type "ExpressionSet" (When building a
ExpressionSet, all Expression objects must be aligned against the
same genome. This is not part of the object type, but enforced
during a call to save_expression_set_v1. @meta ws description as
description @meta ws length(items) as item_count) -> structure:
parameter "description" of String, parameter "items" of list of
type "ExpressionSetItem" (When saving a ExpressionSet, only 'ref'
is required. You should never set 'info'. 'info' is provided
optionally when fetching the ExpressionSet. ref_path is optionally
returned by get_expression_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_expression_id" (The workspace id for a
ReadsAlignment data object. @id ws KBaseRNASeq.RNASeqExpression),
parameter "ref_path" of type "ws_expression_id" (The workspace id
for a ReadsAlignment data object. @id ws
KBaseRNASeq.RNASeqExpression), parameter "label" of String,
parameter "data_attachments" of list of type "DataAttachment" ->
structure: parameter "name" of String, parameter "ref" of type
"ws_obj_id" (The workspace ID for a any data object. @id ws),
parameter "info" of type "object_info" (Information about an
object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
"""
return self._client.call_method(
'SetAPI.get_expression_set_v1',
[params], self._service_ver, context)
def save_expression_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveExpressionSetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of workspace params from
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"ExpressionSet" (When building a ExpressionSet, all Expression
objects must be aligned against the same genome. This is not part
of the object type, but enforced during a call to
save_expression_set_v1. @meta ws description as description @meta
ws length(items) as item_count) -> structure: parameter
"description" of String, parameter "items" of list of type
"ExpressionSetItem" (When saving a ExpressionSet, only 'ref' is
required. You should never set 'info'. 'info' is provided
optionally when fetching the ExpressionSet. ref_path is optionally
returned by get_expression_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_expression_id" (The workspace id for a
ReadsAlignment data object. @id ws KBaseRNASeq.RNASeqExpression),
parameter "ref_path" of type "ws_expression_id" (The workspace id
for a ReadsAlignment data object. @id ws
KBaseRNASeq.RNASeqExpression), parameter "label" of String,
parameter "data_attachments" of list of type "DataAttachment" ->
structure: parameter "name" of String, parameter "ref" of type
"ws_obj_id" (The workspace ID for a any data object. @id ws),
parameter "info" of type "object_info" (Information about an
object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
:returns: instance of type "SaveExpressionSetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
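
Example (an illustrative sketch; workspace name and object refs are
hypothetical, and a configured client instance ``set_api`` is
assumed)::

    res = set_api.save_expression_set_v1({
        'workspace': 'my_workspace',
        'output_object_name': 'my_expression_set',
        'data': {
            'description': 'expression objects aligned to one genome',
            'items': [
                # set only 'ref' (and optionally 'label') when saving
                {'ref': '123/4/5', 'label': 'sample_1'},
                {'ref': '123/6/2', 'label': 'sample_2'},
            ],
        },
    })
    print(res['set_ref'])      # e.g. 'wsid/objid/version'
    print(res['set_info'][1])  # object name from the info tuple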
"""
return self._client.call_method(
'SetAPI.save_expression_set_v1',
[params], self._service_ver, context)
def get_reads_alignment_set_v1(self, params, context=None):
"""
:param params: instance of type "GetReadsAlignmentSetV1Params" (ref -
workspace reference to ReadsAlignmentSet object. include_item_info
- 1 or 0, if 1 additionally provides workspace info (with
metadata) for each ReadsAlignment object in the Set
include_set_item_ref_paths - 1 or 0, if 1, additionally provides
ref_path for each item in the set. The ref_path returned for each
item is either ref_path_to_set;item_ref (if ref_path_to_set is
given) or set_ref;item_ref (if ref_path_to_set is not given)) ->
structure: parameter "ref" of String, parameter
"include_item_info" of type "boolean" (A boolean. 0 = false, 1 =
true.), parameter "include_set_item_ref_paths" of type "boolean"
(A boolean. 0 = false, 1 = true.), parameter "ref_path_to_set" of
list of String
:returns: instance of type "GetReadsAlignmentSetV1Result" ->
structure: parameter "data" of type "ReadsAlignmentSet" (When
building a ReadsAlignmentSet, all ReadsAlignments must be aligned
against the same genome. This is not part of the object type, but
enforced during a call to save_reads_alignment_set_v1. @meta ws
description as description @meta ws length(items) as item_count)
-> structure: parameter "description" of String, parameter "items"
of list of type "ReadsAlignmentSetItem" (When saving a
ReadsAlignmentSet, only 'ref' is required. You should never set
'info'. 'info' is provided optionally when fetching the
ReadsAlignmentSet. ref_path is optionally returned by
get_reads_alignment_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_reads_align_id" (The workspace id for a
ReadsAlignment data object. @id ws KBaseRNASeq.RNASeqAlignment),
parameter "ref_path" of type "ws_reads_align_id" (The workspace id
for a ReadsAlignment data object. @id ws
KBaseRNASeq.RNASeqAlignment), parameter "label" of String,
parameter "info" of type "object_info" (Information about an
object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter
"data_attachments" of list of type "DataAttachment" -> structure:
parameter "name" of String, parameter "ref" of type "ws_obj_id"
(The workspace ID for any data object. @id ws), parameter "info"
of type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
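
Example (an illustrative sketch; the set reference is hypothetical
and a configured client instance ``set_api`` is assumed)::

    res = set_api.get_reads_alignment_set_v1({
        'ref': 'my_workspace/my_alignment_set',
        'include_item_info': 0,
        'include_set_item_ref_paths': 1,
    })
    # without ref_path_to_set, each ref_path is 'set_ref;item_ref'
    paths = [item.get('ref_path') for item in res['data']['items']]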
"""
return self._client.call_method(
'SetAPI.get_reads_alignment_set_v1',
[params], self._service_ver, context)
def save_reads_alignment_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveReadsAlignmentSetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of the workspace params
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"ReadsAlignmentSet" (When building a ReadsAlignmentSet, all
ReadsAlignments must be aligned against the same genome. This is
not part of the object type, but enforced during a call to
save_reads_alignment_set_v1. @meta ws description as description
@meta ws length(items) as item_count) -> structure: parameter
"description" of String, parameter "items" of list of type
"ReadsAlignmentSetItem" (When saving a ReadsAlignmentSet, only
'ref' is required. You should never set 'info'. 'info' is
provided optionally when fetching the ReadsAlignmentSet. ref_path
is optionally returned by get_reads_alignment_set_v1() when its
input parameter 'include_set_item_ref_paths' is set to 1.) ->
structure: parameter "ref" of type "ws_reads_align_id" (The
workspace id for a ReadsAlignment data object. @id ws
KBaseRNASeq.RNASeqAlignment), parameter "ref_path" of type
"ws_reads_align_id" (The workspace id for a ReadsAlignment data
object. @id ws KBaseRNASeq.RNASeqAlignment), parameter "label" of
String, parameter "info" of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter
"data_attachments" of list of type "DataAttachment" -> structure:
parameter "name" of String, parameter "ref" of type "ws_obj_id"
(The workspace ID for any data object. @id ws)
:returns: instance of type "SaveReadsAlignmentSetV1Result" ->
structure: parameter "set_ref" of String, parameter "set_info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
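
Example (an illustrative sketch; refs are hypothetical and a
configured client instance ``set_api`` is assumed)::

    res = set_api.save_reads_alignment_set_v1({
        'workspace': 'my_workspace',
        'output_object_name': 'my_alignment_set',
        'data': {
            'description': 'alignments against a single genome',
            'items': [
                {'ref': '123/7/1', 'label': 'replicate_1'},
                {'ref': '123/8/1', 'label': 'replicate_2'},
            ],
        },
    })
    set_ref = res['set_ref']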
"""
return self._client.call_method(
'SetAPI.save_reads_alignment_set_v1',
[params], self._service_ver, context)
def get_reads_set_v1(self, params, context=None):
"""
:param params: instance of type "GetReadsSetV1Params" (ref -
workspace reference to ReadsSet object. include_item_info - 1 or
0, if 1 additionally provides workspace info (with metadata) for
each Reads object in the Set include_set_item_ref_paths - 1 or 0,
if 1, additionally provides ref_path for each item in the set. The
ref_path returned for each item is either ref_path_to_set;item_ref
(if ref_path_to_set is given) or set_ref;item_ref (if
ref_path_to_set is not given)) -> structure: parameter "ref" of
String, parameter "include_item_info" of type "boolean" (A
boolean. 0 = false, 1 = true.), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "ref_path_to_set" of list of String
:returns: instance of type "GetReadsSetV1Result" -> structure:
parameter "data" of type "ReadsSet" (@meta ws description as
description @meta ws length(items) as item_count) -> structure:
parameter "description" of String, parameter "items" of list of
type "ReadsSetItem" (When saving a ReadsSet, only 'ref' is
required. You should never set 'info'. 'info' is provided
optionally when fetching the ReadsSet. ref_path is optionally
returned by get_reads_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_reads_id" (The workspace ID for a Reads data
object. @id ws KBaseFile.PairedEndLibrary
KBaseFile.SingleEndLibrary), parameter "ref_path" of type
"ws_reads_id" (The workspace ID for a Reads data object. @id ws
KBaseFile.PairedEndLibrary KBaseFile.SingleEndLibrary), parameter
"label" of String, parameter "data_attachments" of list of type
"DataAttachment" -> structure: parameter "name" of String,
parameter "ref" of type "ws_obj_id" (The workspace ID for a any
data object. @id ws), parameter "info" of type "object_info"
(Information about an object, including user provided metadata.
obj_id objid - the numerical id of the object. obj_name name - the
name of the object. type_string type - the type of the object.
timestamp save_date - the save date of the object. obj_ver ver -
the version of the object. username saved_by - the user that saved
or copied the object. ws_id wsid - the workspace containing the
object. ws_name workspace - the workspace containing the object.
string chsum - the md5 checksum of the object. int size - the size
of the object in bytes. usermeta meta - arbitrary user-supplied
metadata about the object.) -> tuple of size 11: parameter "objid"
of type "obj_id" (The unique, permanent numerical ID of an
object.), parameter "name" of type "obj_name" (A string used as a
name for an object. Any string consisting of alphanumeric
characters and the characters |._- that is not an integer is
acceptable.), parameter "type" of type "type_string" (A type
string. Specifies the type and its version in a single string in
the format [module].[typename]-[major].[minor]: module - a string.
The module name of the typespec containing the type. typename - a
string. The name of the type as assigned by the typedef statement.
major - an integer. The major version of the type. A change in the
major version implies the type has changed in a non-backwards
compatible way. minor - an integer. The minor version of the type.
A change in the minor version implies that the type has changed in
a way that is backwards compatible with previous type definitions.
In many cases, the major and minor versions are optional, and if
not provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
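
Example (an illustrative sketch; the set reference is hypothetical
and a configured client instance ``set_api`` is assumed)::

    res = set_api.get_reads_set_v1({
        'ref': 'my_workspace/my_reads_set',
        'include_item_info': 1,
        'include_set_item_ref_paths': 0,
    })
    print(res['data']['description'])
    for item in res['data']['items']:
        # with include_item_info=1, item['info'] is the 11-tuple
        # described above: info[1] is the name, info[2] the type
        print(item['ref'], item['info'][1], item['info'][2])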
"""
return self._client.call_method(
'SetAPI.get_reads_set_v1',
[params], self._service_ver, context)
def save_reads_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveReadsSetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of the workspace params
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"ReadsSet" (@meta ws description as description @meta ws
length(items) as item_count) -> structure: parameter "description"
of String, parameter "items" of list of type "ReadsSetItem" (When
saving a ReadsSet, only 'ref' is required. You should never set
'info'. 'info' is provided optionally when fetching the ReadsSet.
ref_path is optionally returned by get_reads_set_v1() when its
input parameter 'include_set_item_ref_paths' is set to 1.) ->
structure: parameter "ref" of type "ws_reads_id" (The workspace ID
for a Reads data object. @id ws KBaseFile.PairedEndLibrary
KBaseFile.SingleEndLibrary), parameter "ref_path" of type
"ws_reads_id" (The workspace ID for a Reads data object. @id ws
KBaseFile.PairedEndLibrary KBaseFile.SingleEndLibrary), parameter
"label" of String, parameter "data_attachments" of list of type
"DataAttachment" -> structure: parameter "name" of String,
parameter "ref" of type "ws_obj_id" (The workspace ID for a any
data object. @id ws), parameter "info" of type "object_info"
(Information about an object, including user provided metadata.
obj_id objid - the numerical id of the object. obj_name name - the
name of the object. type_string type - the type of the object.
timestamp save_date - the save date of the object. obj_ver ver -
the version of the object. username saved_by - the user that saved
or copied the object. ws_id wsid - the workspace containing the
object. ws_name workspace - the workspace containing the object.
string chsum - the md5 checksum of the object. int size - the size
of the object in bytes. usermeta meta - arbitrary user-supplied
metadata about the object.) -> tuple of size 11: parameter "objid"
of type "obj_id" (The unique, permanent numerical ID of an
object.), parameter "name" of type "obj_name" (A string used as a
name for an object. Any string consisting of alphanumeric
characters and the characters |._- that is not an integer is
acceptable.), parameter "type" of type "type_string" (A type
string. Specifies the type and its version in a single string in
the format [module].[typename]-[major].[minor]: module - a string.
The module name of the typespec containing the type. typename - a
string. The name of the type as assigned by the typedef statement.
major - an integer. The major version of the type. A change in the
major version implies the type has changed in a non-backwards
compatible way. minor - an integer. The minor version of the type.
A change in the minor version implies that the type has changed in
a way that is backwards compatible with previous type definitions.
In many cases, the major and minor versions are optional, and if
not provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
:returns: instance of type "SaveReadsSetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
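
Example (an illustrative sketch; refs are hypothetical and a
configured client instance ``set_api`` is assumed)::

    res = set_api.save_reads_set_v1({
        'workspace': 'my_workspace',
        'output_object_name': 'my_reads_set',
        'data': {
            'description': 'paired-end libraries for one experiment',
            'items': [
                {'ref': '123/10/1', 'label': 'library_a'},
                {'ref': '123/11/1', 'label': 'library_b'},
            ],
        },
    })
    # set_info is the 11-tuple; index 7 is the workspace name
    print(res['set_ref'], res['set_info'][7])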
"""
return self._client.call_method(
'SetAPI.save_reads_set_v1',
[params], self._service_ver, context)
def get_assembly_set_v1(self, params, context=None):
"""
:param params: instance of type "GetAssemblySetV1Params" (ref -
workspace reference to AssemblySet object. include_item_info - 1
or 0, if 1 additionally provides workspace info (with metadata)
for each Assembly object in the Set include_set_item_ref_paths - 1
or 0, if 1, additionally provides ref_path for each item in the
set. The ref_path returned for each item is either
ref_path_to_set;item_ref (if ref_path_to_set is given) or
set_ref;item_ref (if ref_path_to_set is not given)) -> structure:
parameter "ref" of String, parameter "include_item_info" of type
"boolean" (A boolean. 0 = false, 1 = true.), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "ref_path_to_set" of list of String
:returns: instance of type "GetAssemblySetV1Result" -> structure:
parameter "data" of type "AssemblySet" (@meta ws description as
description @meta ws length(items) as item_count) -> structure:
parameter "description" of String, parameter "items" of list of
type "AssemblySetItem" (When saving an AssemblySet, only 'ref' is
required. You should never set 'info'. 'info' is provided
optionally when fetching the AssemblySet. ref_path is optionally
returned by get_assembly_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_assembly_id" (The workspace ID for an Assembly
object. @id ws KBaseGenomeAnnotations.Assembly), parameter
"ref_path" of type "ws_assembly_id" (The workspace ID for an
Assembly object. @id ws KBaseGenomeAnnotations.Assembly),
parameter "label" of String, parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String, parameter "info" of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
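
Example (an illustrative sketch; the set reference and
ref_path_to_set are hypothetical and a configured client instance
``set_api`` is assumed)::

    res = set_api.get_assembly_set_v1({
        'ref': 'my_workspace/my_assembly_set',
        'include_item_info': 0,
        'include_set_item_ref_paths': 1,
        'ref_path_to_set': ['456/1/1'],  # optional path leading to the set
    })
    # with ref_path_to_set given, each ref_path is
    # 'ref_path_to_set;item_ref'
    for item in res['data']['items']:
        print(item.get('ref_path'))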
"""
return self._client.call_method(
'SetAPI.get_assembly_set_v1',
[params], self._service_ver, context)
def save_assembly_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveAssemblySetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of the workspace params
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"AssemblySet" (@meta ws description as description @meta ws
length(items) as item_count) -> structure: parameter "description"
of String, parameter "items" of list of type "AssemblySetItem"
(When saving an AssemblySet, only 'ref' is required. You should
never set 'info'. 'info' is provided optionally when fetching the
AssemblySet. ref_path is optionally returned by
get_assembly_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_assembly_id" (The workspace ID for an Assembly
object. @id ws KBaseGenomeAnnotations.Assembly), parameter
"ref_path" of type "ws_assembly_id" (The workspace ID for an
Assembly object. @id ws KBaseGenomeAnnotations.Assembly),
parameter "label" of String, parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
:returns: instance of type "SaveAssemblySetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
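
Example (an illustrative sketch; refs are hypothetical and a
configured client instance ``set_api`` is assumed)::

    res = set_api.save_assembly_set_v1({
        'workspace': 'my_workspace',
        'output_object_name': 'my_assembly_set',
        'data': {
            'description': 'draft assemblies',
            'items': [
                # 'info' must never be set when saving
                {'ref': '123/20/1', 'label': 'isolate_1'},
                {'ref': '123/21/1', 'label': 'isolate_2'},
            ],
        },
    })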
"""
return self._client.call_method(
'SetAPI.save_assembly_set_v1',
[params], self._service_ver, context)
def get_genome_set_v1(self, params, context=None):
"""
:param params: instance of type "GetGenomeSetV1Params" (ref -
workspace reference to GenomeSet object. include_item_info - 1
or 0, if 1 additionally provides workspace info (with metadata)
for each Genome object in the Set include_set_item_ref_paths - 1
or 0, if 1, additionally provides ref_path for each item in the
set. The ref_path returned for each item is either
ref_path_to_set;item_ref (if ref_path_to_set is given) or
set_ref;item_ref (if ref_path_to_set is not given)) -> structure:
parameter "ref" of String, parameter "include_item_info" of type
"boolean" (A boolean. 0 = false, 1 = true.), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "ref_path_to_set" of list of String
:returns: instance of type "GetGenomeSetV1Result" -> structure:
parameter "data" of type "GenomeSet" (@meta ws description as
description @meta ws length(items) as item_count) -> structure:
parameter "description" of String, parameter "items" of list of
type "GenomeSetItem" (When saving an GenomeSet, only 'ref' is
required. You should never set 'info'. 'info' is provided
optionally when fetching the GenomeSet. ref_path is optionally
returned by get_genome_set_v1() when its input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_genome_id" (The workspace ID for a Genome
object. @id ws KBaseGenomes.Genome), parameter "ref_path" of type
"ws_genome_id" (The workspace ID for a Genome object. @id ws
KBaseGenomes.Genome), parameter "label" of String, parameter
"info" of type "object_info" (Information about an object,
including user provided metadata. obj_id objid - the numerical id
of the object. obj_name name - the name of the object. type_string
type - the type of the object. timestamp save_date - the save date
of the object. obj_ver ver - the version of the object. username
saved_by - the user that saved or copied the object. ws_id wsid -
the workspace containing the object. ws_name workspace - the
workspace containing the object. string chsum - the md5 checksum
of the object. int size - the size of the object in bytes.
usermeta meta - arbitrary user-supplied metadata about the
object.) -> tuple of size 11: parameter "objid" of type "obj_id"
(The unique, permanent numerical ID of an object.), parameter
"name" of type "obj_name" (A string used as a name for an object.
Any string consisting of alphanumeric characters and the
characters |._- that is not an integer is acceptable.), parameter
"type" of type "type_string" (A type string. Specifies the type
and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
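
Example (an illustrative sketch; the set reference is hypothetical
and a configured client instance ``set_api`` is assumed)::

    res = set_api.get_genome_set_v1({
        'ref': 'my_workspace/my_genome_set',
        'include_item_info': 1,
        'include_set_item_ref_paths': 1,
    })
    for item in res['data']['items']:
        # info[9] is the object size in bytes when item info is
        # included
        print(item['ref'], item.get('ref_path'), item['info'][9])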
"""
return self._client.call_method(
'SetAPI.get_genome_set_v1',
[params], self._service_ver, context)
def save_genome_set_v1(self, params, context=None):
"""
:param params: instance of type "SaveGenomeSetV1Params"
(workspace_name or workspace_id - alternative options defining
target workspace, output_object_name - workspace object name (this
parameter is used together with one of the workspace params
above)) -> structure: parameter "workspace" of String, parameter
"output_object_name" of String, parameter "data" of type
"GenomeSet" (@meta ws description as description @meta ws
length(items) as item_count) -> structure: parameter "description"
of String, parameter "items" of list of type "GenomeSetItem" (When
saving a GenomeSet, only 'ref' is required. You should never set
'info'. 'info' is provided optionally when fetching the
GenomeSet. ref_path is optionally returned by get_genome_set_v1()
when its input parameter 'include_set_item_ref_paths' is set to
1.) -> structure: parameter "ref" of type "ws_genome_id" (The
workspace ID for a Genome object. @id ws KBaseGenomes.Genome),
parameter "ref_path" of type "ws_genome_id" (The workspace ID for
a Genome object. @id ws KBaseGenomes.Genome), parameter "label" of
String, parameter "info" of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
:returns: instance of type "SaveGenomeSetV1Result" -> structure:
parameter "set_ref" of String, parameter "set_info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
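        Example (an illustrative sketch; all names are hypothetical and the
        client is assumed to be constructed as shown for get_genome_set_v1):
            res = client.save_genome_set_v1({
                'workspace': 'kbasetest:my_workspace',
                'output_object_name': 'my_genome_set',
                'data': {'description': 'example set',
                         'items': [{'ref': '1/2/3', 'label': 'strain A'}]}
            })
            set_ref = res['set_ref']  # reference of the newly saved set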
"""
return self._client.call_method(
'SetAPI.save_genome_set_v1',
[params], self._service_ver, context)

    def create_sample_set(self, params, context=None):
"""
:param params: instance of type "CreateRNASeqSampleSetParams"
(******* Sample SET METHODS ************) -> structure: parameter
"ws_id" of String, parameter "sampleset_id" of String, parameter
"sampleset_desc" of String, parameter "domain" of String,
parameter "platform" of String, parameter "sample_ids" of list of
String, parameter "condition" of list of String, parameter
"source" of String, parameter "Library_type" of String, parameter
"publication_id" of String, parameter "external_source_date" of
String
:returns: instance of type "CreateRNASeqSampleSetResult" ->
structure: parameter "set_ref" of String, parameter "set_info" of
type "object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String
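        Example (an illustrative sketch; all values are hypothetical):
            res = client.create_sample_set({
                'ws_id': 'kbasetest:my_workspace',
                'sampleset_id': 'my_sample_set',
                'sampleset_desc': 'example RNA-Seq sample set',
                'domain': 'prokaryota',
                'platform': 'Illumina',
                'sample_ids': ['1/4/1', '1/5/1'],
                'condition': ['control', 'treated'],
                'source': 'lab',
                'Library_type': 'SingleEnd'
            })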
"""
return self._client.call_method(
'SetAPI.create_sample_set',
[params], self._service_ver, context)

    def list_sets(self, params, context=None):
"""
Use to get the top-level sets in a WS. Optionally can include
one level down members of those sets.
NOTE: DOES NOT PRESERVE ORDERING OF ITEM LIST IN DATA
:param params: instance of type "ListSetParams" (workspace -
workspace name or ID (alternative to workspaces parameter),
workspaces - list of workspace names or IDs (alternative to
workspace parameter), include_metadata - flag for including
metadata into Set object info and into object info of items (it
affects DP raw data as well), include_raw_data_palettes - advanced
option designed for optimization of listing methods in
NarrativeService. include_set_item_ref_paths - 1 or 0, if 1,
additionally provides ref_path for each item in the set. The
ref_path for each item is either ref_path_to_set;item_ref (if
ref_path_to_set is given) or set_ref;item_ref) -> structure:
parameter "workspace" of String, parameter "workspaces" of String,
parameter "include_set_item_info" of type "boolean" (A boolean. 0
= false, 1 = true.), parameter "include_metadata" of type
"boolean" (A boolean. 0 = false, 1 = true.), parameter
"include_raw_data_palettes" of type "boolean" (A boolean. 0 =
false, 1 = true.), parameter "include_set_item_ref_paths" of type
"boolean" (A boolean. 0 = false, 1 = true.)
:returns: instance of type "ListSetResult" (raw_data_palettes -
optional DP output turned on by 'include_raw_data_palettes' in
input parameters, raw_data_palette_refs - optional DP output
(mapping from workspace Id to reference to DataPalette container
existing in particular workspace) turned on by
'include_raw_data_palettes' in input parameters,) -> structure:
parameter "sets" of list of type "SetInfo" (dp_ref - optional
reference to DataPalette container in case given set object is
coming from DataPalette.) -> structure: parameter "ref" of type
"ws_obj_id" (The workspace ID for a any data object. @id ws),
parameter "info" of type "object_info" (Information about an
object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "items" of
list of type "SetItemInfo" (ref_path is optionally returned by
list_sets() and get_set_items(), when the input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_obj_id" (The workspace ID for a any data object.
@id ws), parameter "ref_path" of type "ws_obj_id" (The workspace
ID for any data object. @id ws), parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String, parameter "dp_ref" of type "ws_obj_id" (The workspace ID
for a any data object. @id ws), parameter "raw_data_palettes" of
list of type "DataInfo" -> structure: parameter "ref" of type
"ws_ref" (@id ws), parameter "info" of type "object_info"
(Information about an object, including user provided metadata.
obj_id objid - the numerical id of the object. obj_name name - the
name of the object. type_string type - the type of the object.
timestamp save_date - the save date of the object. obj_ver ver -
the version of the object. username saved_by - the user that saved
or copied the object. ws_id wsid - the workspace containing the
object. ws_name workspace - the workspace containing the object.
string chsum - the md5 checksum of the object. int size - the size
of the object in bytes. usermeta meta - arbitrary user-supplied
metadata about the object.) -> tuple of size 11: parameter "objid"
of type "obj_id" (The unique, permanent numerical ID of an
object.), parameter "name" of type "obj_name" (A string used as a
name for an object. Any string consisting of alphanumeric
characters and the characters |._- that is not an integer is
acceptable.), parameter "type" of type "type_string" (A type
string. Specifies the type and its version in a single string in
the format [module].[typename]-[major].[minor]: module - a string.
The module name of the typespec containing the type. typename - a
string. The name of the type as assigned by the typedef statement.
major - an integer. The major version of the type. A change in the
major version implies the type has changed in a non-backwards
compatible way. minor - an integer. The minor version of the type.
A change in the minor version implies that the type has changed in
a way that is backwards compatible with previous type definitions.
In many cases, the major and minor versions are optional, and if
not provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter
"raw_data_palette_refs" of mapping from String to String
"""
return self._client.call_method(
'SetAPI.list_sets',
[params], self._service_ver, context)

    def get_set_items(self, params, context=None):
"""
Use to drill down into one or more sets, the position in the
return 'sets' list will match the position in the input ref list.
NOTE: DOES NOT PRESERVE ORDERING OF ITEM LIST IN DATA
:param params: instance of type "GetSetItemsParams" -> structure:
parameter "set_refs" of list of type "SetReference"
(include_set_item_ref_paths - 1 or 0, if 1, additionally provides
ref_path for each item in the set. The ref_path for each item is
either ref_path_to_set;item_ref (if ref_path_to_set is given) or
set_ref;item_ref) -> structure: parameter "ref" of type
"ws_obj_id" (The workspace ID for a any data object. @id ws),
parameter "ref_path_to_set" of list of type "ws_obj_id" (The
workspace ID for any data object. @id ws), parameter
"include_set_item_ref_paths" of type "boolean" (A boolean. 0 =
false, 1 = true.)
:returns: instance of type "GetSetItemsResult" -> structure:
parameter "sets" of list of type "SetInfo" (dp_ref - optional
reference to DataPalette container in case given set object is
coming from DataPalette.) -> structure: parameter "ref" of type
"ws_obj_id" (The workspace ID for a any data object. @id ws),
parameter "info" of type "object_info" (Information about an
object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String, parameter "items" of
list of type "SetItemInfo" (ref_path is optionally returned by
list_sets() and get_set_items(), when the input parameter
'include_set_item_ref_paths' is set to 1.) -> structure: parameter
"ref" of type "ws_obj_id" (The workspace ID for a any data object.
@id ws), parameter "ref_path" of type "ws_obj_id" (The workspace
ID for any data object. @id ws), parameter "info" of type
"object_info" (Information about an object, including user
provided metadata. obj_id objid - the numerical id of the object.
obj_name name - the name of the object. type_string type - the
type of the object. timestamp save_date - the save date of the
object. obj_ver ver - the version of the object. username saved_by
- the user that saved or copied the object. ws_id wsid - the
workspace containing the object. ws_name workspace - the workspace
containing the object. string chsum - the md5 checksum of the
object. int size - the size of the object in bytes. usermeta meta
- arbitrary user-supplied metadata about the object.) -> tuple of
size 11: parameter "objid" of type "obj_id" (The unique, permanent
numerical ID of an object.), parameter "name" of type "obj_name"
(A string used as a name for an object. Any string consisting of
alphanumeric characters and the characters |._- that is not an
integer is acceptable.), parameter "type" of type "type_string" (A
type string. Specifies the type and its version in a single string
in the format [module].[typename]-[major].[minor]: module - a
string. The module name of the typespec containing the type.
typename - a string. The name of the type as assigned by the
typedef statement. major - an integer. The major version of the
type. A change in the major version implies the type has changed
in a non-backwards compatible way. minor - an integer. The minor
version of the type. A change in the minor version implies that
the type has changed in a way that is backwards compatible with
previous type definitions. In many cases, the major and minor
versions are optional, and if not provided the most recent version
will be used. Example: MyModule.MyType-3.1), parameter "save_date"
of type "timestamp" (A time in the format YYYY-MM-DDThh:mm:ssZ,
where Z is either the character Z (representing the UTC timezone)
or the difference in time to UTC in the format +/-HHMM, eg:
2012-12-17T23:24:06-0500 (EST time) 2013-04-03T08:56:32+0000 (UTC
time) 2013-04-03T08:56:32Z (UTC time)), parameter "version" of
Long, parameter "saved_by" of type "username" (Login name of a
KBase user account.), parameter "wsid" of type "ws_id" (The
unique, permanent numerical ID of a workspace.), parameter
"workspace" of type "ws_name" (A string used as a name for a
workspace. Any string consisting of alphanumeric characters and
"_", ".", or "-" that is not an integer is acceptable. The name
may optionally be prefixed with the workspace owner's user name
and a colon, e.g. kbasetest:my_workspace.), parameter "chsum" of
String, parameter "size" of Long, parameter "meta" of type
"usermeta" (User provided metadata about an object. Arbitrary
key-value pairs provided by the user.) -> mapping from String to
String, parameter "dp_ref" of type "ws_obj_id" (The workspace ID
for any data object. @id ws)
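        Example (an illustrative sketch; the set reference is hypothetical):
            res = client.get_set_items({'set_refs': [{'ref': '1/7/2'}]})
            for item in res['sets'][0]['items']:
                print(item['ref'])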
"""
return self._client.call_method(
'SetAPI.get_set_items',
[params], self._service_ver, context)

    def status(self, context=None):
return self._client.call_method('SetAPI.status',
[], self._service_ver, context)
[per-file quality-signal columns for the preceding SetAPI client record: avg_line_length 70.506401 | max_line_length 89 | alphanum_fraction 0.65672 | remaining qsc_* signal columns omitted]

hexsha: 0a030fc22d4cf99b85608e9d71e6f7fca38a5e8f | size: 134,801 | ext: py | lang: Python
max_stars_repo_path: src/azure-cli/azure/cli/command_modules/synapse/tests/latest/test_synapse_scenario.py
max_stars_repo_name: hivyas/azure-cli | max_stars_repo_head_hexsha: bc977a36f3079a6d79633b9c5c29c90462710528
max_stars_repo_licenses: ["MIT"] | max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2020-03-24T13:32:56.000Z | max_stars_repo_stars_event_max_datetime: 2020-03-24T13:32:56.000Z
max_issues_repo_* and max_forks_repo_* columns: same path, repo name, head hexsha, and ["MIT"] licenses; counts and event datetimes null
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import unittest
import time
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer, record_only
TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
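
# Scenario tests for the `az synapse kusto` command group. Each test below
# provisions a Synapse workspace and a Kusto pool, then walks one resource type
# through its create/show/list/update/delete lifecycle, asserting on the
# returned ARM payloads with JMESPath checks.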
class SynapseScenarioTests(ScenarioTest):
location = "eastus"

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_data_connection_event_grid(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'dataConnectionName': self.create_random_name(prefix='dataConName', length=15),
"eventhub_name": self.create_random_name("ehsrv", 20),
"eventhub_namespace": self.create_random_name("ehnamespace", 20),
})
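        # create_random_name keeps recorded and live runs from colliding;
        # ScenarioTest substitutes these self.kwargs values into the
        # '{placeholder}' tokens in the command strings below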
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
# create event hub namespace
self.cmd('az eventhubs namespace create --resource-group {rg} -n {eventhub_namespace} --location eastus',
checks=[
self.check('provisioningState', 'Succeeded')])
# create event hub
self.kwargs['ehresourceid'] = self.cmd(
'az eventhubs eventhub create --resource-group {rg} -n {eventhub_name} --namespace-name {eventhub_namespace}',
checks=[
self.check('status', 'Active')]).get_output_in_json()['id']
self.kwargs['subscription_id'] = self.get_subscription_id()
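        # the storage account resource ID is assembled by hand from the
        # subscription, resource group, and the account created by
        # StorageAccountPreparer (exposed through the 'storage-account' kwarg)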
self.cmd('az synapse kusto data-connection event-grid create '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--event-hub-resource-id "{ehresourceid}" '
'--storage-account-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Storage/storageAccounts/{storage-account}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection list '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections')
])
self.cmd('az synapse kusto data-connection event-grid update '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--event-hub-resource-id "{ehresourceid}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection delete -y '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_data_connection_iot_hub(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'dataConnectionName': self.create_random_name(prefix='dataConName', length=15),
'iotHubName': self.create_random_name(prefix='testiothub', length=15),
'iotHubSharedAccessPolicyName': 'registryRead'
})
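        # 'registryRead' is one of the shared access policies an IoT Hub is
        # created with by default, so no extra policy setup is needed before
        # pointing the data connection at it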
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.kwargs['iotresourceid'] = self.cmd(
'az iot hub create --resource-group "{rg}" --name "{iotHubName}" --location "{location}" ').get_output_in_json()['id']
self.cmd('az synapse kusto data-connection iot-hub create '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--iot-hub-resource-id "{iotresourceid}" '
'--shared-access-policy-name "{iotHubSharedAccessPolicyName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection list '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections')
])
self.cmd('az synapse kusto data-connection iot-hub update '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--iot-hub-resource-id "{iotresourceid}" '
'--shared-access-policy-name "{iotHubSharedAccessPolicyName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection delete -y '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_data_connection_event_hub(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'dataConnectionName': self.create_random_name(prefix='dataConName', length=15),
"eventhub_name": self.create_random_name("ehsrv", 20),
"eventhub_namespace": self.create_random_name("ehnamespace", 20),
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
# create event hub namespace
self.cmd('az eventhubs namespace create --resource-group {rg} -n {eventhub_namespace} --location eastus',
checks=[
self.check('provisioningState', 'Succeeded')])
# create event hub
self.kwargs['ehresourceid'] = self.cmd(
'az eventhubs eventhub create --resource-group {rg} -n {eventhub_name} --namespace-name {eventhub_namespace}',
checks=[
self.check('status', 'Active')]).get_output_in_json()['id']
self.cmd('az synapse kusto data-connection event-hub create '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--event-hub-resource-id "{ehresourceid}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection list '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections')])
self.cmd('az synapse kusto data-connection event-hub update '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--consumer-group "$Default" '
'--event-hub-resource-id "{ehresourceid}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{dataConnectionName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/DataConnections'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto data-connection delete -y '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto data-connection show '
'--data-connection-name "{dataConnectionName}" '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_database_principal_assignment(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'principalAssignmentName': self.create_random_name(prefix='kstprinpal', length=15),
'principalId': '9c527a58-9c1d-4c4f-970f-61feb236b74a'
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database-principal-assignment create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--principal-id "{principalId}" '
'--principal-type "App" '
'--role "Admin" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database-principal-assignment show '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database-principal-assignment list '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/PrincipalAssignments')
])
self.cmd('az synapse kusto database-principal-assignment update '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--principal-id "{principalId}" '
'--principal-type "App" '
'--role "Admin" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database-principal-assignment delete -y '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto database-principal-assignment show '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_pool_principal_assignment(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'principalAssignmentName': self.create_random_name(prefix='kstprinpal', length=15),
'principalId': '9c527a58-9c1d-4c4f-970f-61feb236b74a'
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto pool-principal-assignment create '
'--kusto-pool-name "{kustoPool}" '
'--principal-id "{principalId}" '
'--principal-type "App" '
'--role "AllDatabasesAdmin" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto pool-principal-assignment show '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto pool-principal-assignment list '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/PrincipalAssignments')
])
self.cmd('az synapse kusto pool-principal-assignment update '
'--kusto-pool-name "{kustoPool}" '
'--principal-id "{principalId}" '
'--principal-type "App" '
'--role "AllDatabasesAdmin" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{principalAssignmentName}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/PrincipalAssignments'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto pool-principal-assignment delete -y '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto pool-principal-assignment show '
'--kusto-pool-name "{kustoPool}" '
'--principal-assignment-name "{principalAssignmentName}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)

    @ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_attached_database_configuration(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'leaderkustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15),
'database-configuration-name': self.create_random_name(prefix='conf', length=15)
})
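        # this test needs two pools: the "leader" pool owns the source database
        # and the other pool follows it through an attached database
        # configuration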
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto pool create '
'--name "{leaderkustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{leaderkustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{leaderkustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{leaderkustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.kwargs['leaderkpoolsourceid'] = self.cmd('az synapse kusto pool show '
'--name "{leaderkustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"').get_output_in_json()['id']
self.kwargs['kpoolsourceid'] = self.cmd('az synapse kusto pool show '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"').get_output_in_json()['id']
self.cmd('az synapse kusto attached-database-configuration create '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--kusto-pool-resource-id "{leaderkpoolsourceid}" '
'--database-name "{database}" '
'--default-principals-modification-kind "Union" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database-configuration-name}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/AttachedDatabaseConfigurations'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto attached-database-configuration show '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database-configuration-name}"),
self.check('location', "east us", case_sensitive=False), # "{location}", case_sensitive=False),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/AttachedDatabaseConfigurations'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto attached-database-configuration list '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/AttachedDatabaseConfigurations')
])
self.cmd('az synapse kusto attached-database-configuration update '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--kusto-pool-resource-id "{leaderkpoolsourceid}" '
'--database-name "{database}" '
'--default-principals-modification-kind "Union" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database-configuration-name}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/AttachedDatabaseConfigurations'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto pool detach-follower-database '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-resource-id "{kpoolsourceid}" '
'--name "{leaderkustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
        time.sleep(60)
self.cmd('az synapse kusto attached-database-configuration create '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{kustoPool}" '
'--location "{location}" '
'--kusto-pool-resource-id "{leaderkpoolsourceid}" '
'--database-name "{database}" '
'--default-principals-modification-kind "Union" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database-configuration-name}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/AttachedDatabaseConfigurations'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto attached-database-configuration delete -y '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{leaderkustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto attached-database-configuration show '
'--attached-database-configuration-name "{database-configuration-name}" '
'--kusto-pool-name "{leaderkustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)
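# NOTE: a minimal polling sketch, offered as an alternative to the fixed
# time.sleep() waits used throughout these tests. This helper is NOT part of
# the original suite; the timeout and interval values are illustrative
# assumptions, and only commands already used above are invoked.
def _wait_for_kusto_pool_provisioning(self, expected='Succeeded', timeout=600, interval=30):
    import time
    waited = 0
    while waited < timeout:
        pool = self.cmd('az synapse kusto pool show '
                        '--name "{kustoPool}" '
                        '--resource-group "{rg}" '
                        '--workspace-name "{workspace}"').get_output_in_json()
        # stop polling once the service reports the expected provisioning state
        if pool.get('provisioningState') == expected:
            return pool
        time.sleep(interval)
        waited += interval
    raise AssertionError('kusto pool did not reach provisioningState=%s within %ss' % (expected, timeout))
# e.g. self._wait_for_kusto_pool_provisioning() after a create, instead of a fixed sleep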
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_database(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
'database': self.create_random_name(prefix='testdtabase', length=15)
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
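# create a read-write database; soft-delete-period is an ISO 8601 duration ("P1D" = one day)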
self.cmd('az synapse kusto database create '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database location="{location}" soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database list '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/kustoPools/Databases')
])
self.cmd('az synapse kusto database show '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('location', "east us", case_sensitive=False),  # resolved value of "{location}"
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database update '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--read-write-database soft-delete-period="P1D" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}/{database}"),
self.check('location', "east us", case_sensitive=False),  # resolved value of "{location}"
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools/Databases'),
self.check('provisioningState', 'Succeeded')
])
self.cmd('az synapse kusto database delete -y --database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto database show '
'--database-name "{database}" '
'--kusto-pool-name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
expect_failure=True)
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_kusto_pool(self):
self.kwargs.update({
'location': 'east us',
'kustoPool': self.create_random_name(prefix='testkstpool', length=15),
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
self.cmd('az synapse kusto pool create '
'--name "{kustoPool}" '
'--location "{location}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check('name', "{workspace}/{kustoPool}"),
self.check('type', 'Microsoft.Synapse/workspaces/kustoPools'),
self.check('provisioningState', 'Succeeded'),
self.check('location', 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized"),
self.check('enablePurge', True),
self.check('enableStreamingIngest', True),
])
self.cmd('az synapse kusto pool show '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check("location", 'east us', case_sensitive=False),
self.check("sku.name", "Storage optimized", case_sensitive=False),
self.check("sku.capacity", 2),
self.check("sku.size", "Medium", case_sensitive=False),
])
# az synapse kusto pool list-sku
self.cmd('az synapse kusto pool list-sku '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
# az synapse kusto pool list
self.cmd('az synapse kusto pool list '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
# az synapse kusto pool update
self.cmd('az synapse kusto pool update '
'--name "{kustoPool}" '
'--enable-purge true '
'--enable-streaming-ingest true '
'--sku name="Storage optimized" capacity=2 size="Medium" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"',
checks=[
self.check("name", "{workspace}/{kustoPool}", case_sensitive=False),
self.check("location", 'east us', case_sensitive=False),
self.check("enablePurge", True),
self.check("enableStreamingIngest", True),
self.check("sku.name", "Storage optimized", case_sensitive=False),
self.check("sku.capacity", 2),
self.check("sku.size", "Medium", case_sensitive=False),
])
# az synapse kusto pool add-language-extension
self.cmd('az synapse kusto pool add-language-extension '
'--name "{kustoPool}" '
'--value language-extension-name="PYTHON" '
'--value language-extension-name="R" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
# az synapse kusto pool list-language-extension
self.cmd('az synapse kusto pool list-language-extension '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto pool remove-language-extension '
'--name "{kustoPool}" '
'--value language-extension-name="PYTHON" '
'--value language-extension-name="R" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto pool start '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
self.cmd('az synapse kusto pool stop '
'--name "{kustoPool}" '
'--resource-group "{rg}" '
'--workspace-name "{workspace}"')
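# a hedged follow-up (not in the original test): a stopped pool is expected to
# report state "Stopped" in `az synapse kusto pool show` output; the field name
# and value here are assumptions about the Kusto pool schema.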
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_workspaces(self, resource_group, storage_account):
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
# get workspace with workspace name
workspace = self.cmd('az synapse workspace show --name {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['workspace']),
self.check('type', 'Microsoft.Synapse/workspaces'),
self.check('provisioningState', 'Succeeded')
]).get_output_in_json()
self.kwargs["workspace-id"] = workspace['id']
# list all workspaces under a specific resource group
self.cmd('az synapse workspace list --resource-group {rg}', checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces')
])
# update workspace
self.cmd('az synapse workspace update --ids {workspace-id} --tags key1=value1', checks=[
self.check('tags.key1', 'value1'),
self.check('name', self.kwargs['workspace']),
self.check('id', self.kwargs['workspace-id']),
self.check('type', 'Microsoft.Synapse/workspaces'),
self.check('provisioningState', 'Succeeded')
])
# delete workspace with workspace name
self.cmd('az synapse workspace delete --name {workspace} --resource-group {rg} --yes')
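# deletion completes asynchronously; wait before verifying the workspace is gone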
import time
time.sleep(120)
self.cmd('az synapse workspace show --name {workspace} --resource-group {rg}', expect_failure=True)
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_managed_virtual_network_workspace(self):
# test workspace with managed virtual network
self._create_workspace("--enable-managed-virtual-network")
self.cmd('az synapse workspace show --name {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['workspace']),
self.check('type', 'Microsoft.Synapse/workspaces'),
self.check('provisioningState', 'Succeeded'),
self.check('managedVirtualNetwork', 'default')
])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_spark_pool(self):
self.kwargs.update({
'location': 'eastus',
'spark-pool': self.create_random_name(prefix='testpool', length=15),
'spark-version': '2.4'
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
# create spark pool
spark_pool = self.cmd('az synapse spark pool create --name {spark-pool} --spark-version {spark-version}'
' --workspace {workspace} --resource-group {rg} --node-count 3 --node-size Medium',
checks=[
self.check('name', self.kwargs['spark-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/bigDataPools'),
self.check('provisioningState', 'Succeeded')
]).get_output_in_json()
self.kwargs['pool-id'] = spark_pool['id']
# get spark pool with spark pool name
self.cmd('az synapse spark pool show --name {spark-pool} --workspace {workspace} --resource-group {rg}',
checks=[
self.check('name', self.kwargs['spark-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/bigDataPools'),
self.check('provisioningState', 'Succeeded')
])
# list all spark pools under the workspace
self.cmd('az synapse spark pool list --workspace {workspace} --resource-group {rg}', checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/bigDataPools')
])
# update spark pool
self.cmd('az synapse spark pool update --ids {pool-id} --tags key1=value1', checks=[
self.check('tags.key1', 'value1'),
self.check('name', self.kwargs['spark-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/bigDataPools'),
self.check('provisioningState', 'Succeeded')
])
# delete spark pool with spark pool name
self.cmd(
'az synapse spark pool delete --name {spark-pool} --workspace {workspace} --resource-group {rg} --yes')
self.cmd('az synapse spark pool show --name {spark-pool} --workspace {workspace} --resource-group {rg}',
expect_failure=True)
@record_only()
@unittest.skip('(keyvaultfailure) KeyVault set-policy failed with Bad Request')
def test_workspace_with_cmk(self):
self.kwargs.update({
'location': 'eastus',
'workspace': 'testsynapseworkspacecmk',
'rg': 'testrg',
'storage-account': 'teststorageforsynapsecmk',
'file-system': self.create_random_name(prefix='fs', length=16),
'login-user': 'cliuser1',
'login-password': self.create_random_name(prefix='Pswd1', length=16),
'key-identifier': 'https://testcmksoftdelete.vault.azure.net/keys/newcmk',
'new-key-identifier': 'https://testcmksoftdelete.vault.azure.net/keys/newkey',
'managed-identity': '00000000-0000-1111-2222-333333333333'
})
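# 'managed-identity' above is a placeholder GUID; it is overwritten below with
# the workspace identity's actual principalId before setting the key vault policy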
# create a workspace with customer-managed key (CMK) and data exfiltration protection
workspace_cmk = self.cmd(
'az synapse workspace create --name {workspace} --resource-group {rg} --storage-account {storage-account} '
'--file-system {file-system} --sql-admin-login-user {login-user} '
'--sql-admin-login-password {login-password} --key-identifier {key-identifier} '
' --location {location} --enable-managed-vnet True --prevent-exfiltration True --allowed-tenant-ids \'""\' ', checks=[
self.check('name', self.kwargs['workspace']),
self.check('type', 'Microsoft.Synapse/workspaces'),
self.check('provisioningState', 'Succeeded')
]).get_output_in_json()
self.kwargs['managed-identity'] = workspace_cmk['identity']['principalId']
# set access policy
self.cmd(
'az keyvault set-policy --name testcmksoftdelete --object-id {managed-identity} --key-permissions get unwrapKey wrapKey ')
# activate the workspace with the default key
self.cmd(
'az synapse workspace activate --name default --key-identifier {key-identifier} --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('name', 'default'),
self.check('type', 'Microsoft.Synapse/workspaces/keys')
])
import time
time.sleep(120)
# create workspace key
self.cmd(
'az synapse workspace key create --name newkey --key-identifier {new-key-identifier} --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('name', 'newkey'),
self.check('type', 'Microsoft.Synapse/workspaces/keys')
])
# set access policy
self.cmd(
'az keyvault set-policy --name testcmksoftdelete --object-id {managed-identity} --key-permissions get unwrapKey wrapKey ')
# list workspace key
self.cmd(
'az synapse workspace key list --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('[0].name', 'default'),
self.check('[0].type', 'Microsoft.Synapse/workspaces/keys'),
self.check('[0].keyVaultUrl', self.kwargs['key-identifier']),
])
# show workspace key
self.cmd(
'az synapse workspace key show --name default --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('name', 'default'),
self.check('type', 'Microsoft.Synapse/workspaces/keys'),
self.check('keyVaultUrl', self.kwargs['key-identifier']),
])
# show sql access status of the managed identity
self.cmd(
'az synapse workspace managed-identity show-sql-access --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('grantSqlControlToManagedIdentity.actualState', 'Disabled'),
self.check('type', 'Microsoft.Synapse/workspaces/managedIdentitySqlControlSettings')
])
# grant sql access to managed identity
self.cmd(
'az synapse workspace managed-identity grant-sql-access --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('grantSqlControlToManagedIdentity.actualState', 'Enabled'),
self.check('type', 'Microsoft.Synapse/workspaces/managedIdentitySqlControlSettings')
])
# revoke sql access from the managed identity
self.cmd(
'az synapse workspace managed-identity revoke-sql-access --resource-group {rg} --workspace-name {workspace}', checks=[
self.check('grantSqlControlToManagedIdentity.actualState', 'Disabled'),
self.check('type', 'Microsoft.Synapse/workspaces/managedIdentitySqlControlSettings')
])
# switch active key
self.cmd(
'az synapse workspace update --resource-group {rg} --name {workspace} --key-name newkey ', checks=[
self.check('encryption.cmk.key.name', 'newkey')
])
# update allowed tenant ids
self.cmd(
'az synapse workspace update --resource-group {rg} --name {workspace} --allowed-tenant-ids 72f988bf-86f1-41af-91ab-2d7cd011db47 ', checks=[
self.check('managedVirtualNetworkSettings.allowedAadTenantIdsForLinking[0]', "72f988bf-86f1-41af-91ab-2d7cd011db47")
])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_pool(self):
self.kwargs.update({
'location': 'eastus',
'workspace': 'testsynapseworkspace',
'sql-pool': self.create_random_name(prefix='testsqlpool', length=15),
'performance-level': 'DW400c'
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
# create sql pool
sql_pool = self.cmd(
'az synapse sql pool create --name {sql-pool} --performance-level {performance-level} '
'--workspace {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
]).get_output_in_json()
self.kwargs['pool-id'] = sql_pool['id']
# get sql pool with sql pool name
self.cmd('az synapse sql pool show --name {sql-pool} --workspace {workspace} --resource-group {rg}',
checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
])
# list all sql pools under the workspace
self.cmd('az synapse sql pool list --workspace {workspace} --resource-group {rg}', checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/sqlPools')
])
# update sql pool
self.cmd('az synapse sql pool update --ids {pool-id} --tags key1=value1')
# get sql pool with sql pool id
self.cmd('az synapse sql pool show --ids {pool-id}',
checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('tags.key1', 'value1'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
])
# pause sql pool
self.cmd('az synapse sql pool pause --name {sql-pool} --workspace {workspace} --resource-group {rg}', checks=[])
self.cmd('az synapse sql pool show --name {sql-pool} --workspace {workspace} --resource-group {rg}',
checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('status', 'Paused')
])
# resume sql pool
self.cmd('az synapse sql pool resume --name {sql-pool} --workspace {workspace} --resource-group {rg}',
checks=[])
self.cmd('az synapse sql pool show --name {sql-pool} --workspace {workspace} --resource-group {rg}',
checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('status', 'Online')
])
# delete sql pool with sql pool name
self.cmd(
'az synapse sql pool delete --name {sql-pool} --workspace {workspace} --resource-group {rg} --yes')
self.cmd('az synapse sql pool show --name {sql-pool} --workspace {workspace} --resource-group {rg}',
expect_failure=True)
@record_only()
def test_sql_pool_restore_and_list_deleted(self):
self.kwargs.update({
'location': 'eastus',
'workspace': 'testingsynapseworkspace',
'rg': 'rgtesting',
'sql-pool': 'testrestoresqlpool',
'performance-level': 'DW1000c',
'dest-sql-pool': self.create_random_name(prefix='destsqlpool', length=15),
'restore-point-time': '2021-11-04T07:02:09'
})
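# --time expects a UTC timestamp in ISO 8601 format that corresponds to an
# existing restore point on the source pool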
# restore sql pool
self.cmd('az synapse sql pool restore --name {sql-pool} --workspace-name {workspace} --resource-group {rg} '
'--dest-name {dest-sql-pool} --time {restore-point-time}',
checks=[
self.check('name', self.kwargs['dest-sql-pool'])
])
# get the newly created sql pool
self.cmd('az synapse sql pool show --name {dest-sql-pool} --workspace-name {workspace} --resource-group {rg}',
checks=[
self.check('name', self.kwargs['dest-sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
])
# delete dest sql pool with dest sql pool name
self.cmd(
'az synapse sql pool delete --name {dest-sql-pool} --workspace-name {workspace} --resource-group {rg} --yes')
# the deleted pool takes several minutes to appear in the deleted list
import time
time.sleep(200)
# test list-deleted
self.cmd('az synapse sql pool list-deleted --workspace-name {workspace} --resource-group {rg}',
checks=[
self.greater_than("length([])", 0)
])
@record_only()
def test_sql_pool_classification_and_recommendation(self):
self.kwargs.update({
'location': 'eastus',
'workspace': 'testingsynapseworkspace',
'rg': 'rgtesting',
'sql-pool': 'testingsqlpool',
'schema': 'dbo',
'table': 'Persons',
'column': 'City',
'label': 'Confidential',
'information-type': '"Contact Info"'
})
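# 'information-type' carries an extra pair of quotes because the value contains
# a space and is substituted directly into the command line below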
# classification create
self.cmd('az synapse sql pool classification create --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg} --schema {schema} --table {table} --column {column} '
'--label {label} --information-type {information-type}',
checks=[
self.check('labelName', self.kwargs['label'])
])
# classification show
self.cmd('az synapse sql pool classification show --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg} --schema {schema} --table {table} --column {column}',
checks=[
self.check('labelName', self.kwargs['label'])
])
# classification list
self.cmd('az synapse sql pool classification list --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg}',
checks=[
self.check('[0].labelName', self.kwargs['label'])
])
# classification update
self.cmd('az synapse sql pool classification update --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg} --schema {schema} --table {table} --column {column} '
'--label {label} --information-type {information-type}')
# classification delete
self.cmd('az synapse sql pool classification delete --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg} --schema {schema} --table {table} --column {column}')
# recommendation enable
self.cmd('az synapse sql pool classification recommendation enable --name {sql-pool} '
'--workspace-name {workspace} --resource-group {rg} '
'--schema {schema} --table {table} --column {column}')
# recommendation list
self.cmd('az synapse sql pool classification recommendation list --name {sql-pool} '
'--workspace-name {workspace} --resource-group {rg}',
checks=[
self.greater_than("length([])", 0)
])
self.cmd('az synapse sql pool classification recommendation disable --name {sql-pool} '
'--workspace-name {workspace} --resource-group {rg} '
'--schema {schema} --table {table} --column {column}')
# after disabling, the recommendation list should be empty
self.cmd('az synapse sql pool classification recommendation list --name {sql-pool} '
'--workspace-name {workspace} --resource-group {rg}',
checks=[
self.check("length([])", 0)
])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_pool_tde(self):
self.kwargs.update({
'location': 'eastus',
'sql-pool': self.create_random_name(prefix='testsqlpool', length=15),
'performance-level': 'DW400c'
})
# create a workspace
self._create_workspace()
# create sql pool
self.cmd(
'az synapse sql pool create --name {sql-pool} --performance-level {performance-level} '
'--workspace {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
]).get_output_in_json()
self.cmd(
'az synapse sql pool tde set --status Enabled --name {sql-pool} --workspace-name {workspace} '
'--resource-group {rg} --transparent-data-encryption-name current')
self.cmd('az synapse sql pool tde show --name {sql-pool} --workspace-name {workspace} --resource-group {rg} '
'--transparent-data-encryption-name current',
checks=[
self.check('name', "current"),
self.check('status', "Enabled")
])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_pool_threat_policy(self):
self.kwargs.update({
'location': 'eastus',
'sql-pool': self.create_random_name(prefix='testsqlpool', length=15),
'performance-level': 'DW400c',
'threat-policy': 'threatpolicy'
})
# create a workspace
self._create_workspace()
# create sql pool
self.cmd(
'az synapse sql pool create --name {sql-pool} --performance-level {performance-level} '
'--workspace {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
]).get_output_in_json()
self.cmd('az synapse sql pool threat-policy update --state Enabled --storage-account {storage-account} '
'--name {sql-pool} --workspace-name {workspace} --resource-group {rg} --security-alert-policy-name {threat-policy}')
self.cmd('az synapse sql pool threat-policy show '
'--name {sql-pool} --workspace-name {workspace} --resource-group {rg} --security-alert-policy-name {threat-policy}',
checks=[
self.check('state', 'Enabled')
])
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_ws_audit_policy_logentry_eventhub(self):
self.kwargs.update({
'location': 'eastus',
'log_analytics_workspace_name': self.create_random_name("laws", 20),
'retention-days': '30',
'audit-actions-input': 'DATABASE_LOGOUT_GROUP',
'audit-actions-expected': ['DATABASE_LOGOUT_GROUP'],
'eventhub_name': self.create_random_name("ehsrv", 20),
'eventhub_namespace': self.create_random_name("ehnamespace", 20),
'eventhub_auth_rule': self.create_random_name("ehauthruledb", 20),
})
# create a workspace
self._create_workspace()
self.kwargs['storage-endpoint'] = self._get_storage_endpoint(self.kwargs['storage-account'], self.kwargs['rg'])
self.kwargs['storage-key'] = self._get_storage_key(self.kwargs['storage-account'], self.kwargs['rg'])
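# short option names used below (assumed to mirror the az sql audit-policy options):
# --bsts = blob-storage-target-state, --lats = log-analytics-target-state,
# --lawri = log-analytics-workspace-resource-id, --ehari = event-hub-authorization-rule-id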
# test show command
self.cmd('az synapse sql audit-policy show '
'--workspace-name {workspace} --resource-group {rg} --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Disabled')
])
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled --bsts Enabled --storage-key {storage-key} --storage-endpoint={storage-endpoint}'
' --retention-days={retention-days} --actions {audit-actions-input} --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Enabled'),
self.check('storageEndpoint', self.kwargs['storage-endpoint']),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])
])
# get audit policy
self.cmd('az synapse sql audit-policy show '
'--workspace-name {workspace} --resource-group {rg} --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled --bsts Enabled --storage-account {storage-account}'
' --retention-days={retention-days} --actions {audit-actions-input} --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('storageEndpoint', self.kwargs['storage-endpoint']),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# update audit policy - disable
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Disabled --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Disabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# create log analytics workspace
self.kwargs['log_analytics_workspace_id'] = self.cmd('az monitor log-analytics workspace create --resource-group {rg} '
'--workspace-name {log_analytics_workspace_name}',
checks=[
self.check('name', self.kwargs['log_analytics_workspace_name']),
self.check('provisioningState', 'Succeeded')]).get_output_in_json()['id']
# update audit policy - enable log analytics target
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled'
' --lats Enabled --lawri {log_analytics_workspace_id} '
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify logAnalyticsTargetState is enabled and isAzureMonitorTargetEnabled is true
self.cmd('az synapse sql audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Enabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', True)])
# update audit policy - disable log analytics target
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled --lats Disabled'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify logAnalyticsTargetState is disabled and isAzureMonitorTargetEnabled is false
self.cmd('az synapse sql audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
# create event hub namespace
self.cmd('az eventhubs namespace create --resource-group {rg} -n {eventhub_namespace} --location eastus',
checks=[
self.check('provisioningState', 'Succeeded')])
# create event hub
self.cmd('az eventhubs eventhub create --resource-group {rg} -n {eventhub_name} --namespace-name {eventhub_namespace}',
checks=[
self.check('status', 'Active')])
# create event hub authorization rule
self.kwargs['eventhub_auth_rule_id'] = self.cmd(
'az eventhubs namespace authorization-rule create --resource-group {rg} -n {eventhub_auth_rule} '
'--namespace-name {eventhub_namespace} --rights Listen Manage Send').get_output_in_json()['id']
# update audit policy - enable event hub target
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled --event-hub-target-state Enabled'
' --ehari {eventhub_auth_rule_id} --event-hub {eventhub_name}'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify eventHubTargetState is enabled and isAzureMonitorTargetEnabled is true
self.cmd('az synapse sql audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Enabled'),
self.check('isAzureMonitorTargetEnabled', True)])
# update audit policy - disable event hub target
self.cmd('az synapse sql audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --state Enabled --event-hub-target-state Disabled --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify eventHubTargetState is disabled and isAzureMonitorTargetEnabled is false
self.cmd('az synapse sql audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_pool_audit_policy_logentry_eventhub(self):
self.kwargs.update({
'location': 'eastus',
'log_analytics_workspace_name': self.create_random_name("laws", 20),
'sql-pool': self.create_random_name(prefix='testsqlpool', length=15),
'performance-level': 'DW400c',
'retention-days': '30',
'audit-actions-expected': ['SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP'],
'audit-actions-input': 'SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP',
'eventhub_name': self.create_random_name("ehsrv", 20),
'eventhub_namespace': self.create_random_name("ehnamespace", 20),
'eventhub_auth_rule': self.create_random_name("ehauthruledb", 20),
})
# create a workspace
self._create_workspace()
# create sql pool
sql_pool = self.cmd(
'az synapse sql pool create --name {sql-pool} --performance-level {performance-level} '
'--workspace {workspace} --resource-group {rg}', checks=[
self.check('name', self.kwargs['sql-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/sqlPools'),
self.check('provisioningState', 'Succeeded'),
self.check('status', 'Online')
]).get_output_in_json()
self.kwargs['storage-endpoint'] = self._get_storage_endpoint(self.kwargs['storage-account'], self.kwargs['rg'])
self.kwargs['storage-key'] = self._get_storage_key(self.kwargs['storage-account'], self.kwargs['rg'])
# test show command
self.cmd('az synapse sql pool audit-policy show '
'--workspace-name {workspace} --resource-group {rg} --name {sql-pool} ',
checks=[
self.check('state', 'Disabled')
])
# update audit policy - enable
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace} --name {sql-pool} '
' --state Enabled --bsts Enabled --storage-key {storage-key} --storage-endpoint={storage-endpoint}'
' --retention-days={retention-days} --actions {audit-actions-input} --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Enabled'),
self.check('storageEndpoint', self.kwargs['storage-endpoint']),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy
self.cmd('az synapse sql pool audit-policy show '
'--workspace-name {workspace} --resource-group {rg} --name {sql-pool} --blob-auditing-policy-name bapname',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Enabled --bsts Enabled --storage-account {storage-account}'
' --retention-days={retention-days} --actions {audit-actions-input} --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('storageEndpoint', self.kwargs['storage-endpoint']),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# update audit policy - disable
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Disabled --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Disabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# create log analytics workspace
self.kwargs['log_analytics_workspace_id'] = self.cmd('az monitor log-analytics workspace create --resource-group {rg} '
'--workspace-name {log_analytics_workspace_name}',
checks=[
self.check('name', self.kwargs['log_analytics_workspace_name']),
self.check('provisioningState',
'Succeeded')]).get_output_in_json()['id']
# update audit policy - enable log analytics target
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Enabled'
' --lats Enabled --lawri {log_analytics_workspace_id} '
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify logAnalyticsTargetState is enabled and isAzureMonitorTargetEnabled is true
self.cmd('az synapse sql pool audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} ',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Enabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', True)])
# update audit policy - disable log analytics target
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Enabled --lats Disabled'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify logAnalyticsTargetState is disabled and isAzureMonitorTargetEnabled is false
self.cmd('az synapse sql pool audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool}',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
# create event hub namespace
self.cmd('az eventhubs namespace create --resource-group {rg} -n {eventhub_namespace} --location eastus',
checks=[
self.check('provisioningState', 'Succeeded')])
# create event hub
self.cmd('az eventhubs eventhub create --resource-group {rg} -n {eventhub_name} --namespace-name {eventhub_namespace}',
checks=[
self.check('status', 'Active')])
# create event hub authorization rule
self.kwargs['eventhub_auth_rule_id'] = self.cmd(
'az eventhubs namespace authorization-rule create --resource-group {rg} -n {eventhub_auth_rule} '
'--namespace-name {eventhub_namespace} --rights Listen Manage Send').get_output_in_json()['id']
# update audit policy - enable event hub target
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Enabled --event-hub-target-state Enabled'
' --ehari {eventhub_auth_rule_id} --event-hub {eventhub_name}'
' --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify eventHubTargetState is enabled and isAzureMonitorTargetEnabled is true
self.cmd('az synapse sql pool audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool}',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Enabled'),
self.check('isAzureMonitorTargetEnabled', True)])
# update audit policy - disable event hub target
self.cmd('az synapse sql pool audit-policy update --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool} --state Enabled --event-hub-target-state Disabled --blob-auditing-policy-name bapn',
checks=[
self.check('state', 'Enabled'),
self.check('retentionDays', self.kwargs['retention-days']),
self.check('auditActionsAndGroups', self.kwargs['audit-actions-expected'])])
# get audit policy - verify eventHubTargetState is disabled and isAzureMonitorTargetEnabled is false
self.cmd('az synapse sql pool audit-policy show --resource-group {rg} --workspace-name {workspace}'
' --name {sql-pool}',
checks=[
self.check('state', 'Enabled'),
self.check('blobStorageTargetState', 'Enabled'),
self.check('logAnalyticsTargetState', 'Disabled'),
self.check('eventHubTargetState', 'Disabled'),
self.check('isAzureMonitorTargetEnabled', False)])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_sql_aad_admin(self):
self.kwargs.update({
'location': 'eastus',
'user-name': 'fakeuser',
'object-id': '00000000-0000-4002-becf-488f3e6ab703',
'user-email': 'fakeuser@fakedomain.com'
})
# create a workspace
self._create_workspace()
# Test create cmdlet
self.cmd('az synapse sql ad-admin create --workspace-name {workspace} --resource-group {rg} '
'--display-name {user-name} --object-id {object-id}',
checks=[
self.check('login', self.kwargs['user-name'])
])
# Test show cmdlet
self.cmd('az synapse sql ad-admin show --workspace-name {workspace} --resource-group {rg}',
checks=[
self.check('login', self.kwargs['user-name']),
self.check('name', 'activeDirectory')
])
# Test update cmdlet
self.cmd('az synapse sql ad-admin update --workspace-name {workspace} --resource-group {rg} '
'--display-name {user-email}',
checks=[
self.check('login', self.kwargs['user-email'])
])
# Test delete cmdlet
self.cmd('az synapse sql ad-admin delete --workspace-name {workspace} --resource-group {rg} -y')
self.cmd('az synapse sql ad-admin show --workspace-name {workspace} --resource-group {rg}', expect_failure=True)
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_ip_firewall_rules(self, resource_group, storage_account):
self.kwargs.update({
'ruleName': self.create_random_name(prefix='rule', length=8),
'startIpAddress': "0.0.0.0",
'endIpAddress': "255.255.255.255",
'secondIpAddress': "192.0.0.1"
})
# create a workspace
self._create_workspace()
# check workspace name
self.cmd('az synapse workspace check-name --name {workspace}', checks=[
self.check('available', False)
])
# create a firewall rule
self.cmd(
'az synapse workspace firewall-rule create --name {ruleName} --workspace-name {workspace} '
'--resource-group {rg} --start-ip-address {startIpAddress} --end-ip-address {endIpAddress}',
checks=[
self.check('name', self.kwargs['ruleName']),
self.check('type', 'Microsoft.Synapse/workspaces/firewallRules'),
self.check('provisioningState', 'Succeeded')
])
# get a firewall rule
self.cmd(
'az synapse workspace firewall-rule show --name {ruleName} --workspace-name {workspace} '
'--resource-group {rg}',
checks=[
self.check('name', self.kwargs['ruleName']),
self.check('type', 'Microsoft.Synapse/workspaces/firewallRules'),
self.check('provisioningState', 'Succeeded')
])
# update a firewall rule
self.cmd(
'az synapse workspace firewall-rule update --name {ruleName} --workspace-name {workspace} '
'--resource-group {rg} --start-ip-address {secondIpAddress}',
checks=[
self.check('name', self.kwargs['ruleName']),
self.check('startIpAddress', self.kwargs['secondIpAddress']),
self.check('type', 'Microsoft.Synapse/workspaces/firewallRules'),
self.check('provisioningState', 'Succeeded')
])
# list all firewall rules under a specific workspace
self.cmd('az synapse workspace firewall-rule list --workspace-name {workspace} --resource-group {rg}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/firewallRules')
])
# delete a firewall rule
self.cmd(
'az synapse workspace firewall-rule delete --name {ruleName} --workspace-name {workspace} '
'--resource-group {rg} --yes')
import time
time.sleep(20)
self.cmd('az synapse workspace firewall-rule show --name {ruleName} --workspace-name {workspace} '
'--resource-group {rg}', expect_failure=True)
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
def test_spark_job(self, resource_group):
self.kwargs.update({
'spark-pool': 'testsparkpool',
'workspace': 'testsynapseworkspace',
'job': 'WordCount_Java',
'main-definition-file': 'abfss://testfilesystem@adlsgen2account.dfs.core.windows.net/samples/java/wordcount/wordcount.jar',
'main-class-name': 'WordCount',
'arguments': [
'abfss://testfilesystem@adlsgen2account.dfs.core.windows.net/samples/java/wordcount/shakespeare.txt',
'abfss://testfilesystem@adlsgen2account.dfs.core.windows.net/samples/java/wordcount/result/'],
'executors': 2,
'executor-size': 'Medium',
'configuration': '{\\"spark.dynamicAllocation.maxExecutors\\":\\"18\\"}'
})
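# the --configuration value is double-escaped so that, after shell and kwargs
# substitution, the service receives the JSON {"spark.dynamicAllocation.maxExecutors": "18"}
# (verified by the livyInfo check below)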
# create a spark batch job
batch_job = self.cmd('az synapse spark job submit --name {job} --workspace-name {workspace} '
'--spark-pool-name {spark-pool} --main-definition-file {main-definition-file} '
'--main-class-name {main-class-name} --arguments {arguments} '
'--executors {executors} --executor-size {executor-size} --configuration {configuration} ',
checks=[self.check('name', self.kwargs['job']),
self.check('jobType', 'SparkBatch'),
self.check('state', 'not_started'),
self.check('livyInfo.jobCreationRequest.configuration',
'{{\'spark.dynamicAllocation.maxExecutors\': \'18\'}}')
]).get_output_in_json()
self.kwargs['batch-id'] = batch_job['id']
# get a spark batch job with batch id
self.cmd('az synapse spark job show --livy-id {batch-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool}', checks=[self.check('id', self.kwargs['batch-id'])])
# list all spark batch jobs under a specific spark pool
self.cmd('az synapse spark job list --workspace-name {workspace} '
'--spark-pool-name {spark-pool}',
checks=[
self.check('sessions[0].jobType', 'SparkBatch')
])
# cancel a spark batch job with batch id
self.cmd('az synapse spark job cancel --livy-id {batch-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool} --yes')
import time
time.sleep(60)
self.cmd('az synapse spark job show --livy-id {batch-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool}',
checks=[
self.check('result', 'Cancelled')
])
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
def test_spark_session_and_statements(self, resource_group):
self.kwargs.update({
'spark-pool': 'testsparkpool',
'workspace': 'testsynapseworkspace',
'job': self.create_random_name(prefix='clisession', length=14),
'executor-size': 'Small',
'executors': 2,
'code': "\"import time\ntime.sleep(10)\nprint('hello from cli')\"",
'language': 'pyspark'
})
# create a spark session
create_result = self.cmd('az synapse spark session create --name {job} --workspace-name {workspace} '
'--spark-pool-name {spark-pool} --executor-size {executor-size} '
'--executors {executors}',
checks=[
self.check('jobType', 'SparkSession'),
self.check('name', self.kwargs['job']),
self.check('state', 'not_started')
]).get_output_in_json()
self.kwargs['session-id'] = create_result['id']
# wait for the spark session to be created; startup typically takes a few minutes
import time
time.sleep(360)
# get a spark session
self.cmd('az synapse spark session show --livy-id {session-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool}',
checks=[
self.check('id', self.kwargs['session-id']),
self.check('state', 'idle')
])
# list all spark session jobs under a specific spark pool
self.cmd('az synapse spark session list --workspace-name {workspace} '
'--spark-pool-name {spark-pool}',
checks=[
self.check('sessions[0].jobType', 'SparkSession')
])
# reset the spark session's timeout
self.cmd('az synapse spark session reset-timeout --livy-id {session-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool}')
# create a spark session statement job
statement = self.cmd('az synapse spark statement invoke --session-id {session-id} '
'--workspace-name {workspace} --spark-pool-name {spark-pool} '
'--code {code} --language {language}',
checks=[
self.check('state', 'waiting')
]).get_output_in_json()
self.kwargs['statement-id'] = statement['id']
time.sleep(10)
# get a spark session statement
self.cmd('az synapse spark statement show --livy-id {statement-id} --session-id {session-id} '
'--workspace-name {workspace} --spark-pool-name {spark-pool}',
checks=[
self.check('state', 'running')
])
# list all spark session statements under a specific spark session
self.cmd('az synapse spark statement list --session-id {session-id} '
'--workspace-name {workspace} --spark-pool-name {spark-pool}',
checks=[
self.check('statements[0].state', 'running')
])
# cancel a spark session statement
self.cmd('az synapse spark statement cancel --livy-id {statement-id} --session-id {session-id} '
'--workspace-name {workspace} --spark-pool-name {spark-pool} --yes',
checks=[
self.check('msg', 'canceled')
])
# delete/cancel a spark session
self.cmd('az synapse spark session cancel --livy-id {session-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool} --yes')
import time
time.sleep(120)
self.cmd('az synapse spark session show --livy-id {session-id} --workspace-name {workspace} '
'--spark-pool-name {spark-pool}',
checks=[
self.check('state', 'killed')
])
@record_only()
def test_access_control(self):
self.kwargs.update({
'workspace': 'clitestsynapseworkspace',
'role': 'Synapse Contributor',
'userPrincipal': 'username@contoso.com',
'servicePrincipal': 'testsynapsecli',
'scopeName': 'workspaces/{workspaceName}/bigDataPools/{bigDataPoolName}',
'itemType': 'bigDataPools',
'item': 'testitem'})
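# 'scopeName' is a scope template as returned by `az synapse role scope list`;
# the inner {workspaceName}/{bigDataPoolName} placeholders are part of that
# template string, not test kwargs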
self.cmd(
'az synapse role scope list --workspace-name {workspace} ',
checks=[
self.check("contains([], '{scopeName}')", True)
]
)
self.cmd(
'az synapse role definition list --workspace-name {workspace}',
checks=[
self.check('[0].name', 'Synapse Administrator')
])
# get role definition
role_definition_get = self.cmd(
'az synapse role definition show --workspace-name {workspace} --role "{role}" ',
checks=[
self.check('name', self.kwargs['role'])
]).get_output_in_json()
self.kwargs['roleId'] = role_definition_get['id']
# create role assignment
role_assignment_create = self.cmd(
'az synapse role assignment create --workspace-name {workspace} --role "{role}" '
'--assignee {servicePrincipal} --assignment-id 0550e787-7841-4669-9ac8-a8176e900002',
checks=[
self.check('roleDefinitionId', self.kwargs['roleId'])
]).get_output_in_json()
self.kwargs['roleAssignmentId'] = role_assignment_create['id']
self.kwargs['roleId'] = role_assignment_create['roleDefinitionId']
self.kwargs['principalId'] = role_assignment_create['principalId']
# create role assignment at scope
self.cmd(
'az synapse role assignment create --workspace-name {workspace} --role "{role}" '
'--assignee {servicePrincipal} --item-type {itemType} --item {item} '
'--assignment-id 0333e787-7841-4669-9ac8-a8176e900002',
checks=[
self.check('roleDefinitionId', self.kwargs['roleId']),
self.check('scope', 'workspaces/{workspace}/{itemType}/{item}')
])
# get role assignment
self.cmd(
'az synapse role assignment show --workspace-name {workspace} --id {roleAssignmentId} ',
checks=[
self.check('roleDefinitionId', self.kwargs['roleId']),
self.check('principalId', self.kwargs['principalId'])
])
# list role assignment by role and scope
self.cmd(
'az synapse role assignment list --workspace-name {workspace} --role "{role}" --item-type {itemType} --item {item}',
checks=[
self.check("length([])", 2)
])
# list role assignment by servicePrincipal
self.cmd(
'az synapse role assignment list --workspace-name {workspace} --assignee {servicePrincipal} ',
checks=[
self.check("length([])", 2)
])
# list role assignment by object_id
self.cmd(
'az synapse role assignment list --workspace-name {workspace} --assignee-object-id {principalId} ',
checks=[
self.check("length([])", 2)
])
# delete role assignment
self.cmd(
'az synapse role assignment delete --workspace-name {workspace} --ids {roleAssignmentId} -y ')
self.cmd(
'az synapse role assignment show --workspace-name {workspace} --id {roleAssignmentId} ',
expect_failure=True)
def _create_workspace(self, *additional_create_params):
self.kwargs.update({
'workspace': self.create_random_name(prefix='clitest', length=16),
'location': self.location,
'file-system': 'testfilesystem',
'login-user': 'cliuser1',
'login-password': self.create_random_name(prefix='Pswd1', length=16)
})
# create synapse workspace
self.cmd(
'az synapse workspace create --name {workspace} --resource-group {rg} --storage-account {storage-account} '
'--file-system {file-system} --sql-admin-login-user {login-user} '
'--sql-admin-login-password {login-password}'
' --location {location} ' + ' '.join(additional_create_params), checks=[
self.check('name', self.kwargs['workspace']),
self.check('type', 'Microsoft.Synapse/workspaces'),
self.check('provisioningState', 'Succeeded')
])
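# _create_workspace is shared by the scenario tests above; optional CLI flags are
# forwarded verbatim, e.g. self._create_workspace("--enable-managed-virtual-network")
# as used in test_managed_virtual_network_workspace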
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_linked_service(self):
self.kwargs.update({
'name': 'linkedservice',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'linkedservice.json')
})
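# the @ prefix in --file @"{file}" tells the CLI to read the JSON payload from
# the file path rather than treating the argument as inline content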
# create a workspace
self._create_workspace()
# create firewall rule
self.cmd(
'az synapse workspace firewall-rule create --resource-group {rg} --name allowAll --workspace-name {workspace} '
'--start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255', checks=[
self.check('provisioningState', 'Succeeded')
]
)
import time
time.sleep(20)
# create linked service
self.cmd(
'az synapse linked-service create --workspace-name {workspace} --name {name} --file @"{file}"',
checks=[
self.check('name', self.kwargs['name'])
])
# get linked service
self.cmd(
'az synapse linked-service show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list linked service
self.cmd(
'az synapse linked-service list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/linkedservices')
])
# delete linked service
self.cmd(
'az synapse linked-service delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse linked-service show --workspace-name {workspace} --name {name}',
expect_failure=True)
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_dataset(self):
self.kwargs.update({
'name': 'dataset'})
# create a workspace
self._create_workspace()
# create firewall rule
self.cmd(
'az synapse workspace firewall-rule create --resource-group {rg} --name allowAll --workspace-name {workspace} '
'--start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255', checks=[
self.check('provisioningState', 'Succeeded')
]
)
import time
time.sleep(20)
self.kwargs['file'] = ('{\\"properties\\":{\\"linkedServiceName\\":{\\"referenceName\\":\\"' + self.kwargs[
'workspace'] + '-WorkspaceDefaultStorage\\",'
'\\"type\\":\\"LinkedServiceReference\\"},\\"type\\":\\"Orc\\",\\"typeProperties\\":{\\"location\\":{\\"type\\":\\"AzureBlobFSLocation\\"}}}}')
# create dataset
self.cmd(
'az synapse dataset create --workspace-name {workspace} --name {name} --file {file}',
checks=[
self.check('name', self.kwargs['name'])
])
# get dataset
self.cmd(
'az synapse dataset show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list dataset
self.cmd(
'az synapse dataset list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/datasets')
])
# delete dataset
self.cmd(
'az synapse dataset delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse dataset show --workspace-name {workspace} --name {name}',
expect_failure=True)
@record_only()
def test_pipeline(self):
self.kwargs.update({
'workspace': 'testsynapseworkspace',
'name': 'pipeline',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'pipeline.json')
})
# create pipeline
self.cmd(
'az synapse pipeline create --workspace-name {workspace} --name {name} --file @"{file}"',
checks=[
self.check('name', self.kwargs['name'])
])
# get pipeline
self.cmd(
'az synapse pipeline show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list pipeline
self.cmd(
'az synapse pipeline list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/pipelines')
])
# create pipeline run
pipeline_run = self.cmd(
'az synapse pipeline create-run --workspace-name {workspace} --name {name}').get_output_in_json()
self.kwargs['runId'] = pipeline_run['runId']
# cancel pipeline run
self.cmd(
'az synapse pipeline-run cancel --workspace-name {workspace} --run-id {runId} -y')
import time
time.sleep(20)
# get pipeline run by run id
self.cmd(
'az synapse pipeline-run show --workspace-name {workspace} --run-id {runId}',
checks=[
self.check('status', 'Cancelled')
])
# get pipeline run by workspace
self.cmd(
'az synapse pipeline-run query-by-workspace --workspace-name {workspace} '
'--last-updated-after 2020-09-01T00:36:44.3345758Z --last-updated-before 2020-10-16T00:36:44.3345758Z')
# get activity run
self.cmd(
'az synapse activity-run query-by-pipeline-run --workspace-name {workspace} --name {name} --run-id {runId} '
'--last-updated-after 2020-09-01T00:36:44.3345758Z --last-updated-before 2020-10-16T00:36:44.3345758Z')
# delete pipeline
self.cmd(
'az synapse pipeline delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse pipeline show --workspace-name {workspace} --name {name}',
expect_failure=True)
@record_only()
def test_trigger(self):
self.kwargs.update({
'workspace': 'testsynapseworkspace',
'name': 'trigger',
'event-trigger': 'EventTrigger',
'tumbling-window-trigger': 'TumblingWindowTrigger',
'run-id': '08585700206218758559786000276CU61',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'trigger.json')
})
# create trigger
self.cmd(
'az synapse trigger create --workspace-name {workspace} --name {name} --file @"{file}"',
checks=[
self.check('name', self.kwargs['name'])
])
# get trigger
self.cmd(
'az synapse trigger show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list trigger
self.cmd(
'az synapse trigger list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/triggers')
])
# delete trigger
self.cmd(
'az synapse trigger delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse trigger show --workspace-name {workspace} --name {name}',
expect_failure=True)
# subscribe to event
self.cmd(
'az synapse trigger subscribe-to-event --workspace-name {workspace} --name {event-trigger}',
checks=[
self.check('status', 'Provisioning')
])
import time
time.sleep(20)
# get event subscription status
self.cmd(
'az synapse trigger get-event-subscription-status --workspace-name {workspace} --name {event-trigger}',
checks=[
self.check('status', 'Enabled')
])
# unsubscribe from event
self.cmd(
'az synapse trigger unsubscribe-from-event --workspace-name {workspace} --name {event-trigger}',
checks=[
self.check('status', 'Deprovisioning')
])
# start a trigger
self.cmd(
'az synapse trigger start --workspace-name {workspace} --name {tumbling-window-trigger}')
# get trigger run by workspace
self.cmd(
'az synapse trigger-run query-by-workspace --workspace-name {workspace} '
'--last-updated-after 2020-09-01T00:36:44.3345758Z --last-updated-before 2020-10-01T00:36:44.3345758Z')
# rerun a trigger
self.cmd(
'az synapse trigger-run rerun --workspace-name {workspace} --name {tumbling-window-trigger} --run-id {run-id}')
# stop a trigger
self.cmd(
'az synapse trigger stop --workspace-name {workspace} --name {tumbling-window-trigger}')
@record_only()
@unittest.skip('(InvalidTokenIssuer) Token Authentication failed with SecurityTokenInvalidIssuerException')
def test_data_flow(self):
self.kwargs.update({
'workspace': 'testsynapseworkspace',
'name': 'dataflow',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'dataflow.json')
})
# create data flow
self.cmd(
'az synapse data-flow create --workspace-name {workspace} --name {name} --file @"{file}"',
checks=[
self.check('name', self.kwargs['name'])
])
# get data flow
self.cmd(
'az synapse data-flow show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list data flow
self.cmd(
'az synapse data-flow list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/dataflows')
])
# delete data flow
self.cmd(
'az synapse data-flow delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse data-flow show --workspace-name {workspace} --name {name}',
expect_failure=True)
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_notebook(self):
self.kwargs.update({
'workspace': 'testsynapseworkspace',
'name': 'notebook',
'spark-pool': 'testpool',
'spark-version': '2.4',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'notebook.ipynb')
})
# create a workspace
self._create_workspace()
# create firewall rule
self.cmd(
'az synapse workspace firewall-rule create --resource-group {rg} --name allowAll --workspace-name {workspace} '
'--start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255', checks=[
self.check('provisioningState', 'Succeeded')
]
)
# create spark pool
self.cmd('az synapse spark pool create --name {spark-pool} --spark-version {spark-version}'
' --workspace {workspace} --resource-group {rg} --node-count 3 --node-size Medium',
checks=[
self.check('name', self.kwargs['spark-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/bigDataPools'),
self.check('provisioningState', 'Succeeded')
]).get_output_in_json()
# create notebook
self.cmd(
'az synapse notebook create --workspace-name {workspace} --name {name} --file @"{file}" '
'--spark-pool-name {spark-pool}',
checks=[
self.check('name', self.kwargs['name'])
])
# get notebook
self.cmd(
'az synapse notebook show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list notebook
self.cmd(
'az synapse notebook list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/notebooks')
])
# export notebook
self.kwargs['output-folder'] = os.getcwd()
self.cmd(
'az synapse notebook export --workspace-name {workspace} --name {name} '
'--output-folder "{output-folder}"')
file_path = os.path.join(self.kwargs['output-folder'], self.kwargs['name'] + '.ipynb')
self.assertTrue(os.path.isfile(file_path))
os.remove(file_path)
# delete notebook
self.cmd(
'az synapse notebook delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse notebook show --workspace-name {workspace} --name {name}',
expect_failure=True)
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_workspace_package(self):
self.kwargs.update({
'name': 'wordcount.jar',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'wordcount.jar')
})
# create a workspace
self._create_workspace()
# create firewall rule
self.cmd(
'az synapse workspace firewall-rule create --resource-group {rg} --name allowAll --workspace-name {workspace} '
'--start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255', checks=[
self.check('provisioningState', 'Succeeded')
]
)
import time
time.sleep(20)
# upload workspace package
self.cmd(
'az synapse workspace-package upload --workspace-name {workspace} --package "{file}"',
checks=[
self.check('name', self.kwargs['name'])
])
# get workspace package
self.cmd(
'az synapse workspace-package show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list workspace package
self.cmd(
'az synapse workspace-package list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/libraries')
])
# delete workspace package
self.cmd(
'az synapse workspace-package delete --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse workspace-package show --workspace-name {workspace} --name {name}',
expect_failure=True)
@record_only()
def test_integration_runtime(self):
self.kwargs.update({
'rg': 'rgtesting',
'workspace': 'testingsynapseworkspace',
'name': 'integrationruntime',
'selfhosted-name': 'selfhostedir',
'selfhosted-integration-runtime': 'SelfHostedIntegrationRuntime',
'ssisirname': 'testssisir'})
# create managed integration runtime
self.cmd(
'az synapse integration-runtime managed create --resource-group {rg} --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# create self-hosted integration runtime
self.cmd(
'az synapse integration-runtime self-hosted create --resource-group {rg} --workspace-name {workspace} --name {selfhosted-name}',
checks=[
self.check('name', self.kwargs['selfhosted-name'])
])
# get integration runtime
self.cmd(
'az synapse integration-runtime show --resource-group {rg} --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list integration runtime
self.cmd(
'az synapse integration-runtime list --resource-group {rg} --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/integrationruntimes')
])
# delete integration runtime
self.cmd(
'az synapse integration-runtime delete --resource-group {rg} --workspace-name {workspace} --name {name} -y')
self.cmd(
'az synapse integration-runtime show --resource-group {rg} --workspace-name {workspace} --name {name}',
expect_failure=True)
# upgrade self-hosted integration runtime
self.cmd(
'az synapse integration-runtime upgrade --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}')
# get keys for a self-hosted integration runtime
key = self.cmd(
'az synapse integration-runtime list-auth-key --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}').get_output_in_json()
assert key['authKey1'] is not None
# regenerate self-hosted integration runtime key
key = self.cmd(
'az synapse integration-runtime regenerate-auth-key --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime} '
'--key-name authKey1').get_output_in_json()
assert key['authKey1'] is not None
assert key['authKey2'] is None
# get metric data for a self-hosted integration runtime
self.cmd(
'az synapse integration-runtime get-monitoring-data --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}',
checks=[
self.check('name', self.kwargs['selfhosted-integration-runtime'])
])
# skip the self-hosted integration runtime node tests below because they need a real machine hosting the IR
# get self-hosted integration runtime node information
#self.cmd(
# 'az synapse integration-runtime-node show --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime} '
# '--node-name {node}',
# checks=[
# self.check('nodeName', self.kwargs['node'])
# ])
# update self-hosted integration runtime node
#self.cmd(
# 'az synapse integration-runtime-node update --resource-group {rg} --workspace-name {workspace} \
# --name {selfhosted-integration-runtime} --node-name {node} --auto-update On --update-delay-offset PT03H',
# checks=[
# self.check('nodeName', self.kwargs['node'])
# ])
# get self-hosted integration runtime node ip
#self.cmd(
# 'az synapse integration-runtime-node get-ip-address --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime} '
# '--node-name {node}')
# sync credentials among integration runtime nodes
self.cmd(
'az synapse integration-runtime sync-credentials --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}')
# get connection info
#self.cmd(
# 'az synapse integration-runtime get-connection-info --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}')
# get status
self.cmd(
'az synapse integration-runtime get-status --resource-group {rg} --workspace-name {workspace} --name {selfhosted-integration-runtime}',
checks=[
self.check('name', self.kwargs['selfhosted-integration-runtime'])
])
# start/stop ssis integration runtime
self.cmd(
'az synapse integration-runtime start --resource-group {rg} --workspace-name {workspace} --name {ssisirname}',
checks=[
self.check('properties.state', 'Started')
])
self.cmd(
'az synapse integration-runtime stop --resource-group {rg} --workspace-name {workspace} --name {ssisirname} -y')
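# helpers below read storage account properties from ARM, using --query (JMESPath) to pluck single values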
def _get_storage_endpoint(self, storage_account, resource_group):
return self.cmd('az storage account show -g {} -n {}'
' --query primaryEndpoints.blob'
.format(resource_group, storage_account)).get_output_in_json()
def _get_storage_key(self, storage_account, resource_group):
return self.cmd('az storage account keys list -g {} -n {} --query [0].value'
.format(resource_group, storage_account)).get_output_in_json()
@record_only()
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
def test_managed_private_endpoints(self):
self.kwargs.update({
'workspace': 'testsynapseworkspacepe',
'name': 'AzureDataLakeStoragePE',
'privateLinkResourceId': '/subscriptions/051ddeca-1ed6-4d8b-ba6f-1ff561e5f3b3/resourceGroups/bigdataqa/providers/Microsoft.Storage/storageAccounts/hozhao0917gen2',
'groupId': 'dfs'})
# create managed private endpoint
self.cmd(
'az synapse managed-private-endpoints create --workspace-name {workspace} --pe-name {name} --resource-id {privateLinkResourceId} --group-Id {groupId}',
checks=[
self.check('name', self.kwargs['name'])
])
# wait for the managed private endpoint provisioning to settle; only sleep during live/recording runs
if self.is_live or self.in_recording:
import time
time.sleep(90)
# get managed private endpoint
self.cmd(
'az synapse managed-private-endpoints show --workspace-name {workspace} --pe-name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# list managed private endpoint
self.cmd(
'az synapse managed-private-endpoints list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/managedVirtualNetworks/managedPrivateEndpoints')
])
# delete managed private endpoint
self.cmd(
'az synapse managed-private-endpoints delete --workspace-name {workspace} --pe-name {name} -y')
if self.is_live or self.in_recording:
import time
time.sleep(60)
self.cmd(
'az synapse managed-private-endpoints show --workspace-name {workspace} --pe-name {name}',
expect_failure=True)
@ResourceGroupPreparer(name_prefix='synapse-cli', random_name_length=16)
@StorageAccountPreparer(name_prefix='adlsgen2', length=16, location=location, key='storage-account')
def test_spark_job_definition(self):
self.kwargs.update({
'name': 'SparkAutoCreate1',
'spark-pool': 'testpool',
'spark-version': '2.4',
'file': os.path.join(os.path.join(os.path.dirname(__file__), 'assets'), 'sparkjobdefinition.json')
})
# create a workspace
self._create_workspace()
# create firewall rule
self.cmd(
'az synapse workspace firewall-rule create --resource-group {rg} --name allowAll --workspace-name {workspace} '
'--start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255', checks=[
self.check('provisioningState', 'Succeeded')
]
)
# create spark pool
self.cmd('az synapse spark pool create --name {spark-pool} --spark-version {spark-version}'
' --workspace {workspace} --resource-group {rg} --node-count 3 --node-size Medium',
checks=[
self.check('name', self.kwargs['spark-pool']),
self.check('type', 'Microsoft.Synapse/workspaces/bigDataPools'),
self.check('provisioningState', 'Succeeded')
]).get_output_in_json()
# create a spark job definition
self.cmd(
'az synapse spark-job-definition create --workspace-name {workspace} --name {name} --file @"{file}" ',
checks=[
self.check('name', self.kwargs['name'])
])
# Get a spark job definition
self.cmd(
'az synapse spark-job-definition show --workspace-name {workspace} --name {name}',
checks=[
self.check('name', self.kwargs['name'])
])
# List spark job definitions
self.cmd(
'az synapse spark-job-definition list --workspace-name {workspace}',
checks=[
self.check('[0].type', 'Microsoft.Synapse/workspaces/sparkjobdefinitions')
])
# delete a spark job definition
self.cmd(
'az synapse spark-job-definition delete --workspace-name {workspace} --name {name}')
self.cmd(
'az synapse spark-job-definition show --workspace-name {workspace} --name {name}',
expect_failure=True)
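# --- Editor's illustrative sketch (not part of the original test file) ---
# Every command above uses {placeholder} tokens that ScenarioTest.cmd() resolves
# against self.kwargs via str.format before invoking the CLI. The tiny stand-in
# below mimics only that substitution step, assuming no kwargs value itself
# contains unescaped braces.
class _KwargsSubstitutionDemo:
    def __init__(self):
        self.kwargs = {'workspace': 'testsynapseworkspace', 'name': 'pipeline'}

    def cmd(self, command):
        # the same substitution the scenario tests rely on
        return command.format(**self.kwargs)

assert _KwargsSubstitutionDemo().cmd(
    'az synapse pipeline show --workspace-name {workspace} --name {name}'
) == 'az synapse pipeline show --workspace-name testsynapseworkspace --name pipeline'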
| 48.402513
| 175
| 0.552058
| 12,417
| 134,801
| 5.937263
| 0.050254
| 0.058354
| 0.036502
| 0.061202
| 0.893208
| 0.874028
| 0.850182
| 0.82205
| 0.787109
| 0.774087
| 0
| 0.00914
| 0.307646
| 134,801
| 2,784
| 176
| 48.419899
| 0.780778
| 0.058597
| 0
| 0.784269
| 0
| 0.071297
| 0.472231
| 0.123648
| 0
| 0
| 0
| 0
| 0.00184
| 1
| 0.017019
| false
| 0.00184
| 0.00874
| 0.00092
| 0.027599
| 0.00046
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 0a496da5feef388403f14f9b8bf405572bb892ff
| 87,776
| py
| Python
| sdk/python/pulumi_azure_native/web/v20210101/web_app_auth_settings_slot.py
| polivbr/pulumi-azure-native
| 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
| ["Apache-2.0"] | null | null | null
| sdk/python/pulumi_azure_native/web/v20210101/web_app_auth_settings_slot.py
| polivbr/pulumi-azure-native
| 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
| ["Apache-2.0"] | null | null | null
| sdk/python/pulumi_azure_native/web/v20210101/web_app_auth_settings_slot.py
| polivbr/pulumi-azure-native
| 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
| ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = ['WebAppAuthSettingsSlotArgs', 'WebAppAuthSettingsSlot']
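# Generated Pulumi SDK modules follow an args-class pattern: all resource inputs
# live on a @pulumi.input_type class whose fields are read and written through
# pulumi.get/pulumi.set, which lets the runtime handle Output values and
# camelCase/snake_case property-name translation uniformly.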
@pulumi.input_type
class WebAppAuthSettingsSlotArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
slot: pulumi.Input[str],
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
config_version: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a WebAppAuthSettingsSlot resource.
:param pulumi.Input[str] name: Name of web app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] slot: Name of web app slot. If not specified then will default to production slot.
:param pulumi.Input[str] aad_claims_authorization: Gets a JSON string containing the Azure AD Acl settings.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_login_params: Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_external_redirect_urls: External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:param pulumi.Input[str] auth_file_path: The path of the config file containing auth settings.
If the path is relative, the base directory is the site's root directory.
:param pulumi.Input[str] client_id: The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret: The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret_certificate_thumbprint: An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
:param pulumi.Input[str] client_secret_setting_name: The app setting name that contains the client secret of the relying party application.
:param pulumi.Input[str] config_version: The ConfigVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of the control plane for Authentication / Authorization.
:param pulumi.Input['BuiltInAuthenticationProvider'] default_provider: The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
:param pulumi.Input[bool] enabled: <code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
:param pulumi.Input[str] facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret_setting_name: The app setting name that contains the app secret used for Facebook Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] git_hub_client_id: The Client Id of the GitHub app used for login.
This setting is required for enabling GitHub login.
:param pulumi.Input[str] git_hub_client_secret: The Client Secret of the GitHub app used for GitHub Login.
This setting is required for enabling GitHub login.
:param pulumi.Input[str] git_hub_client_secret_setting_name: The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] git_hub_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
This setting is optional.
:param pulumi.Input[str] google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret_setting_name: The app setting name that contains the client secret associated with
the Google web application.
:param pulumi.Input[Sequence[pulumi.Input[str]]] google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] is_auth_from_file: "true" if the auth config settings should be read from a file,
"false" otherwise
:param pulumi.Input[str] issuer: The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret_setting_name: The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
:param pulumi.Input[Sequence[pulumi.Input[str]]] microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
:param pulumi.Input[str] runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
:param pulumi.Input[float] token_refresh_extension_hours: The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
:param pulumi.Input[bool] token_store_enabled: <code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
:param pulumi.Input[str] twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret_setting_name: The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
:param pulumi.Input['UnauthenticatedClientAction'] unauthenticated_client_action: The action to take when an unauthenticated client attempts to access the app.
:param pulumi.Input[bool] validate_issuer: Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "slot", slot)
if aad_claims_authorization is not None:
pulumi.set(__self__, "aad_claims_authorization", aad_claims_authorization)
if additional_login_params is not None:
pulumi.set(__self__, "additional_login_params", additional_login_params)
if allowed_audiences is not None:
pulumi.set(__self__, "allowed_audiences", allowed_audiences)
if allowed_external_redirect_urls is not None:
pulumi.set(__self__, "allowed_external_redirect_urls", allowed_external_redirect_urls)
if auth_file_path is not None:
pulumi.set(__self__, "auth_file_path", auth_file_path)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if client_secret is not None:
pulumi.set(__self__, "client_secret", client_secret)
if client_secret_certificate_thumbprint is not None:
pulumi.set(__self__, "client_secret_certificate_thumbprint", client_secret_certificate_thumbprint)
if client_secret_setting_name is not None:
pulumi.set(__self__, "client_secret_setting_name", client_secret_setting_name)
if config_version is not None:
pulumi.set(__self__, "config_version", config_version)
if default_provider is not None:
pulumi.set(__self__, "default_provider", default_provider)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if facebook_app_id is not None:
pulumi.set(__self__, "facebook_app_id", facebook_app_id)
if facebook_app_secret is not None:
pulumi.set(__self__, "facebook_app_secret", facebook_app_secret)
if facebook_app_secret_setting_name is not None:
pulumi.set(__self__, "facebook_app_secret_setting_name", facebook_app_secret_setting_name)
if facebook_o_auth_scopes is not None:
pulumi.set(__self__, "facebook_o_auth_scopes", facebook_o_auth_scopes)
if git_hub_client_id is not None:
pulumi.set(__self__, "git_hub_client_id", git_hub_client_id)
if git_hub_client_secret is not None:
pulumi.set(__self__, "git_hub_client_secret", git_hub_client_secret)
if git_hub_client_secret_setting_name is not None:
pulumi.set(__self__, "git_hub_client_secret_setting_name", git_hub_client_secret_setting_name)
if git_hub_o_auth_scopes is not None:
pulumi.set(__self__, "git_hub_o_auth_scopes", git_hub_o_auth_scopes)
if google_client_id is not None:
pulumi.set(__self__, "google_client_id", google_client_id)
if google_client_secret is not None:
pulumi.set(__self__, "google_client_secret", google_client_secret)
if google_client_secret_setting_name is not None:
pulumi.set(__self__, "google_client_secret_setting_name", google_client_secret_setting_name)
if google_o_auth_scopes is not None:
pulumi.set(__self__, "google_o_auth_scopes", google_o_auth_scopes)
if is_auth_from_file is not None:
pulumi.set(__self__, "is_auth_from_file", is_auth_from_file)
if issuer is not None:
pulumi.set(__self__, "issuer", issuer)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if microsoft_account_client_id is not None:
pulumi.set(__self__, "microsoft_account_client_id", microsoft_account_client_id)
if microsoft_account_client_secret is not None:
pulumi.set(__self__, "microsoft_account_client_secret", microsoft_account_client_secret)
if microsoft_account_client_secret_setting_name is not None:
pulumi.set(__self__, "microsoft_account_client_secret_setting_name", microsoft_account_client_secret_setting_name)
if microsoft_account_o_auth_scopes is not None:
pulumi.set(__self__, "microsoft_account_o_auth_scopes", microsoft_account_o_auth_scopes)
if runtime_version is not None:
pulumi.set(__self__, "runtime_version", runtime_version)
if token_refresh_extension_hours is not None:
pulumi.set(__self__, "token_refresh_extension_hours", token_refresh_extension_hours)
if token_store_enabled is not None:
pulumi.set(__self__, "token_store_enabled", token_store_enabled)
if twitter_consumer_key is not None:
pulumi.set(__self__, "twitter_consumer_key", twitter_consumer_key)
if twitter_consumer_secret is not None:
pulumi.set(__self__, "twitter_consumer_secret", twitter_consumer_secret)
if twitter_consumer_secret_setting_name is not None:
pulumi.set(__self__, "twitter_consumer_secret_setting_name", twitter_consumer_secret_setting_name)
if unauthenticated_client_action is not None:
pulumi.set(__self__, "unauthenticated_client_action", unauthenticated_client_action)
if validate_issuer is not None:
pulumi.set(__self__, "validate_issuer", validate_issuer)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of web app.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group to which the resource belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def slot(self) -> pulumi.Input[str]:
"""
Name of web app slot. If not specified then will default to production slot.
"""
return pulumi.get(self, "slot")
@slot.setter
def slot(self, value: pulumi.Input[str]):
pulumi.set(self, "slot", value)
@property
@pulumi.getter(name="aadClaimsAuthorization")
def aad_claims_authorization(self) -> Optional[pulumi.Input[str]]:
"""
Gets a JSON string containing the Azure AD Acl settings.
"""
return pulumi.get(self, "aad_claims_authorization")
@aad_claims_authorization.setter
def aad_claims_authorization(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "aad_claims_authorization", value)
@property
@pulumi.getter(name="additionalLoginParams")
def additional_login_params(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
"""
return pulumi.get(self, "additional_login_params")
@additional_login_params.setter
def additional_login_params(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_login_params", value)
@property
@pulumi.getter(name="allowedAudiences")
def allowed_audiences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
"""
return pulumi.get(self, "allowed_audiences")
@allowed_audiences.setter
def allowed_audiences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_audiences", value)
@property
@pulumi.getter(name="allowedExternalRedirectUrls")
def allowed_external_redirect_urls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
"""
return pulumi.get(self, "allowed_external_redirect_urls")
@allowed_external_redirect_urls.setter
def allowed_external_redirect_urls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_external_redirect_urls", value)
@property
@pulumi.getter(name="authFilePath")
def auth_file_path(self) -> Optional[pulumi.Input[str]]:
"""
The path of the config file containing auth settings.
If the path is relative, the base directory is the site's root directory.
"""
return pulumi.get(self, "auth_file_path")
@auth_file_path.setter
def auth_file_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auth_file_path", value)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
"""
The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_secret")
@client_secret.setter
def client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret", value)
@property
@pulumi.getter(name="clientSecretCertificateThumbprint")
def client_secret_certificate_thumbprint(self) -> Optional[pulumi.Input[str]]:
"""
An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
"""
return pulumi.get(self, "client_secret_certificate_thumbprint")
@client_secret_certificate_thumbprint.setter
def client_secret_certificate_thumbprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_certificate_thumbprint", value)
@property
@pulumi.getter(name="clientSecretSettingName")
def client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the client secret of the relying party application.
"""
return pulumi.get(self, "client_secret_setting_name")
@client_secret_setting_name.setter
def client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_setting_name", value)
@property
@pulumi.getter(name="configVersion")
def config_version(self) -> Optional[pulumi.Input[str]]:
"""
The ConfigVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of the control plane for Authentication / Authorization.
"""
return pulumi.get(self, "config_version")
@config_version.setter
def config_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_version", value)
@property
@pulumi.getter(name="defaultProvider")
def default_provider(self) -> Optional[pulumi.Input['BuiltInAuthenticationProvider']]:
"""
The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
return pulumi.get(self, "default_provider")
@default_provider.setter
def default_provider(self, value: Optional[pulumi.Input['BuiltInAuthenticationProvider']]):
pulumi.set(self, "default_provider", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
<code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="facebookAppId")
def facebook_app_id(self) -> Optional[pulumi.Input[str]]:
"""
The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_id")
@facebook_app_id.setter
def facebook_app_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_id", value)
@property
@pulumi.getter(name="facebookAppSecret")
def facebook_app_secret(self) -> Optional[pulumi.Input[str]]:
"""
The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_secret")
@facebook_app_secret.setter
def facebook_app_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_secret", value)
@property
@pulumi.getter(name="facebookAppSecretSettingName")
def facebook_app_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the app secret used for Facebook Login.
"""
return pulumi.get(self, "facebook_app_secret_setting_name")
@facebook_app_secret_setting_name.setter
def facebook_app_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_secret_setting_name", value)
@property
@pulumi.getter(name="facebookOAuthScopes")
def facebook_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_o_auth_scopes")
@facebook_o_auth_scopes.setter
def facebook_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "facebook_o_auth_scopes", value)
@property
@pulumi.getter(name="gitHubClientId")
def git_hub_client_id(self) -> Optional[pulumi.Input[str]]:
"""
The Client Id of the GitHub app used for login.
This setting is required for enabling GitHub login.
"""
return pulumi.get(self, "git_hub_client_id")
@git_hub_client_id.setter
def git_hub_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_id", value)
@property
@pulumi.getter(name="gitHubClientSecret")
def git_hub_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The Client Secret of the GitHub app used for GitHub Login.
This setting is required for enabling GitHub login.
"""
return pulumi.get(self, "git_hub_client_secret")
@git_hub_client_secret.setter
def git_hub_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_secret", value)
@property
@pulumi.getter(name="gitHubClientSecretSettingName")
def git_hub_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret_setting_name")
@git_hub_client_secret_setting_name.setter
def git_hub_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_secret_setting_name", value)
@property
@pulumi.getter(name="gitHubOAuthScopes")
def git_hub_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
This setting is optional.
"""
return pulumi.get(self, "git_hub_o_auth_scopes")
@git_hub_o_auth_scopes.setter
def git_hub_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "git_hub_o_auth_scopes", value)
@property
@pulumi.getter(name="googleClientId")
def google_client_id(self) -> Optional[pulumi.Input[str]]:
"""
The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_id")
@google_client_id.setter
def google_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_id", value)
@property
@pulumi.getter(name="googleClientSecret")
def google_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_secret")
@google_client_secret.setter
def google_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_secret", value)
@property
@pulumi.getter(name="googleClientSecretSettingName")
def google_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the client secret associated with
the Google web application.
"""
return pulumi.get(self, "google_client_secret_setting_name")
@google_client_secret_setting_name.setter
def google_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_secret_setting_name", value)
@property
@pulumi.getter(name="googleOAuthScopes")
def google_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_o_auth_scopes")
@google_o_auth_scopes.setter
def google_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "google_o_auth_scopes", value)
@property
@pulumi.getter(name="isAuthFromFile")
def is_auth_from_file(self) -> Optional[pulumi.Input[str]]:
"""
"true" if the auth config settings should be read from a file,
"false" otherwise
"""
return pulumi.get(self, "is_auth_from_file")
@is_auth_from_file.setter
def is_auth_from_file(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "is_auth_from_file", value)
@property
@pulumi.getter
def issuer(self) -> Optional[pulumi.Input[str]]:
"""
The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
"""
return pulumi.get(self, "issuer")
@issuer.setter
def issuer(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "issuer", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="microsoftAccountClientId")
def microsoft_account_client_id(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_id")
@microsoft_account_client_id.setter
def microsoft_account_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_id", value)
@property
@pulumi.getter(name="microsoftAccountClientSecret")
def microsoft_account_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_secret")
@microsoft_account_client_secret.setter
def microsoft_account_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_secret", value)
@property
@pulumi.getter(name="microsoftAccountClientSecretSettingName")
def microsoft_account_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
"""
return pulumi.get(self, "microsoft_account_client_secret_setting_name")
@microsoft_account_client_secret_setting_name.setter
def microsoft_account_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_secret_setting_name", value)
@property
@pulumi.getter(name="microsoftAccountOAuthScopes")
def microsoft_account_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
"""
return pulumi.get(self, "microsoft_account_o_auth_scopes")
@microsoft_account_o_auth_scopes.setter
def microsoft_account_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "microsoft_account_o_auth_scopes", value)
@property
@pulumi.getter(name="runtimeVersion")
def runtime_version(self) -> Optional[pulumi.Input[str]]:
"""
The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
"""
return pulumi.get(self, "runtime_version")
@runtime_version.setter
def runtime_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runtime_version", value)
@property
@pulumi.getter(name="tokenRefreshExtensionHours")
def token_refresh_extension_hours(self) -> Optional[pulumi.Input[float]]:
"""
The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
"""
return pulumi.get(self, "token_refresh_extension_hours")
@token_refresh_extension_hours.setter
def token_refresh_extension_hours(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "token_refresh_extension_hours", value)
@property
@pulumi.getter(name="tokenStoreEnabled")
def token_store_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
<code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
"""
return pulumi.get(self, "token_store_enabled")
@token_store_enabled.setter
def token_store_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "token_store_enabled", value)
@property
@pulumi.getter(name="twitterConsumerKey")
def twitter_consumer_key(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_key")
@twitter_consumer_key.setter
def twitter_consumer_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_key", value)
@property
@pulumi.getter(name="twitterConsumerSecret")
def twitter_consumer_secret(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_secret")
@twitter_consumer_secret.setter
def twitter_consumer_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_secret", value)
@property
@pulumi.getter(name="twitterConsumerSecretSettingName")
def twitter_consumer_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
"""
return pulumi.get(self, "twitter_consumer_secret_setting_name")
@twitter_consumer_secret_setting_name.setter
def twitter_consumer_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_secret_setting_name", value)
@property
@pulumi.getter(name="unauthenticatedClientAction")
def unauthenticated_client_action(self) -> Optional[pulumi.Input['UnauthenticatedClientAction']]:
"""
The action to take when an unauthenticated client attempts to access the app.
"""
return pulumi.get(self, "unauthenticated_client_action")
@unauthenticated_client_action.setter
def unauthenticated_client_action(self, value: Optional[pulumi.Input['UnauthenticatedClientAction']]):
pulumi.set(self, "unauthenticated_client_action", value)
@property
@pulumi.getter(name="validateIssuer")
def validate_issuer(self) -> Optional[pulumi.Input[bool]]:
"""
Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
return pulumi.get(self, "validate_issuer")
@validate_issuer.setter
def validate_issuer(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "validate_issuer", value)
class WebAppAuthSettingsSlot(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
config_version: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Configuration settings for the Azure App Service Authentication / Authorization feature.
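## Example Usage

A minimal sketch of attaching auth settings to a deployment slot. Every name and ID below (app, resource group, slot, AAD client ID, tenant) is a hypothetical placeholder, and the enum member names follow Pulumi's generated-enum conventions:

```python
import pulumi_azure_native as azure_native

slot_auth = azure_native.web.v20210101.WebAppAuthSettingsSlot(
    "slotAuthSettings",
    name="my-web-app",                 # placeholder web app name
    resource_group_name="my-rg",       # placeholder resource group
    slot="staging",                    # placeholder slot name
    enabled=True,
    unauthenticated_client_action=azure_native.web.v20210101.UnauthenticatedClientAction.REDIRECT_TO_LOGIN_PAGE,
    default_provider=azure_native.web.v20210101.BuiltInAuthenticationProvider.AZURE_ACTIVE_DIRECTORY,
    client_id="00000000-0000-0000-0000-000000000000",  # placeholder AAD app registration
    issuer="https://sts.windows.net/{tenant-guid}/",   # placeholder tenant, per the issuer docs below
)
```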
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] aad_claims_authorization: A JSON string containing the Azure AD ACL settings.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_login_params: Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_external_redirect_urls: External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:param pulumi.Input[str] auth_file_path: The path of the config file containing auth settings.
If the path is relative, the base directory is the site's root directory.
:param pulumi.Input[str] client_id: The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other third-party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret: The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret_certificate_thumbprint: An alternative to the client secret: the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the client secret. It is also optional.
:param pulumi.Input[str] client_secret_setting_name: The app setting name that contains the client secret of the relying party application.
:param pulumi.Input[str] config_version: The ConfigVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of the control plane for Authentication / Authorization.
:param pulumi.Input['BuiltInAuthenticationProvider'] default_provider: The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
:param pulumi.Input[bool] enabled: <code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
:param pulumi.Input[str] facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret_setting_name: The app setting name that contains the app secret used for Facebook Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] git_hub_client_id: The Client ID of the GitHub app used for login.
This setting is required for enabling GitHub Login.
:param pulumi.Input[str] git_hub_client_secret: The Client Secret of the GitHub app used for GitHub Login.
This setting is required for enabling GitHub Login.
:param pulumi.Input[str] git_hub_client_secret_setting_name: The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] git_hub_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
This setting is optional.
:param pulumi.Input[str] google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret_setting_name: The app setting name that contains the client secret associated with
the Google web application.
:param pulumi.Input[Sequence[pulumi.Input[str]]] google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] is_auth_from_file: "true" if the auth config settings should be read from a file,
"false" otherwise.
:param pulumi.Input[str] issuer: The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret_setting_name: The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
:param pulumi.Input[Sequence[pulumi.Input[str]]] microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
:param pulumi.Input[str] name: Name of web app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
:param pulumi.Input[str] slot: Name of the web app slot. If not specified, it defaults to the production slot.
:param pulumi.Input[float] token_refresh_extension_hours: The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
:param pulumi.Input[bool] token_store_enabled: <code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
:param pulumi.Input[str] twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret_setting_name: The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
:param pulumi.Input['UnauthenticatedClientAction'] unauthenticated_client_action: The action to take when an unauthenticated client attempts to access the app.
:param pulumi.Input[bool] validate_issuer: Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebAppAuthSettingsSlotArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Configuration settings for the Azure App Service Authentication / Authorization feature.
:param str resource_name: The name of the resource.
:param WebAppAuthSettingsSlotArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebAppAuthSettingsSlotArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
config_version: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
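# Create path: build the input property bag from the keyword arguments.
# (When rehydrating an existing resource via get() below, __props__ arrives
# pre-built and opts.id is set, so this branch is skipped.)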
__props__ = WebAppAuthSettingsSlotArgs.__new__(WebAppAuthSettingsSlotArgs)
__props__.__dict__["aad_claims_authorization"] = aad_claims_authorization
__props__.__dict__["additional_login_params"] = additional_login_params
__props__.__dict__["allowed_audiences"] = allowed_audiences
__props__.__dict__["allowed_external_redirect_urls"] = allowed_external_redirect_urls
__props__.__dict__["auth_file_path"] = auth_file_path
__props__.__dict__["client_id"] = client_id
__props__.__dict__["client_secret"] = client_secret
__props__.__dict__["client_secret_certificate_thumbprint"] = client_secret_certificate_thumbprint
__props__.__dict__["client_secret_setting_name"] = client_secret_setting_name
__props__.__dict__["config_version"] = config_version
__props__.__dict__["default_provider"] = default_provider
__props__.__dict__["enabled"] = enabled
__props__.__dict__["facebook_app_id"] = facebook_app_id
__props__.__dict__["facebook_app_secret"] = facebook_app_secret
__props__.__dict__["facebook_app_secret_setting_name"] = facebook_app_secret_setting_name
__props__.__dict__["facebook_o_auth_scopes"] = facebook_o_auth_scopes
__props__.__dict__["git_hub_client_id"] = git_hub_client_id
__props__.__dict__["git_hub_client_secret"] = git_hub_client_secret
__props__.__dict__["git_hub_client_secret_setting_name"] = git_hub_client_secret_setting_name
__props__.__dict__["git_hub_o_auth_scopes"] = git_hub_o_auth_scopes
__props__.__dict__["google_client_id"] = google_client_id
__props__.__dict__["google_client_secret"] = google_client_secret
__props__.__dict__["google_client_secret_setting_name"] = google_client_secret_setting_name
__props__.__dict__["google_o_auth_scopes"] = google_o_auth_scopes
__props__.__dict__["is_auth_from_file"] = is_auth_from_file
__props__.__dict__["issuer"] = issuer
__props__.__dict__["kind"] = kind
__props__.__dict__["microsoft_account_client_id"] = microsoft_account_client_id
__props__.__dict__["microsoft_account_client_secret"] = microsoft_account_client_secret
__props__.__dict__["microsoft_account_client_secret_setting_name"] = microsoft_account_client_secret_setting_name
__props__.__dict__["microsoft_account_o_auth_scopes"] = microsoft_account_o_auth_scopes
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["runtime_version"] = runtime_version
if slot is None and not opts.urn:
raise TypeError("Missing required property 'slot'")
__props__.__dict__["slot"] = slot
__props__.__dict__["token_refresh_extension_hours"] = token_refresh_extension_hours
__props__.__dict__["token_store_enabled"] = token_store_enabled
__props__.__dict__["twitter_consumer_key"] = twitter_consumer_key
__props__.__dict__["twitter_consumer_secret"] = twitter_consumer_secret
__props__.__dict__["twitter_consumer_secret_setting_name"] = twitter_consumer_secret_setting_name
__props__.__dict__["unauthenticated_client_action"] = unauthenticated_client_action
__props__.__dict__["validate_issuer"] = validate_issuer
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:web/v20210101:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20150801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20150801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20160801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20180201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20181101:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20190801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20190801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20200601:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20200901:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200901:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20201001:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201001:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20201201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20210115:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210115:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-native:web/v20210201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210201:WebAppAuthSettingsSlot")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppAuthSettingsSlot, __self__).__init__(
'azure-native:web/v20210101:WebAppAuthSettingsSlot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppAuthSettingsSlot':
"""
Get an existing WebAppAuthSettingsSlot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
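A minimal sketch (the resource ID shown is an illustrative placeholder, not a real ID):

```python
existing = WebAppAuthSettingsSlot.get(
    "imported-auth-settings",
    id="/subscriptions/<sub-id>/resourceGroups/my-rg/providers/Microsoft.Web/sites/my-app/slots/staging/config/authsettings",
)
```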
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = WebAppAuthSettingsSlotArgs.__new__(WebAppAuthSettingsSlotArgs)
__props__.__dict__["aad_claims_authorization"] = None
__props__.__dict__["additional_login_params"] = None
__props__.__dict__["allowed_audiences"] = None
__props__.__dict__["allowed_external_redirect_urls"] = None
__props__.__dict__["auth_file_path"] = None
__props__.__dict__["client_id"] = None
__props__.__dict__["client_secret"] = None
__props__.__dict__["client_secret_certificate_thumbprint"] = None
__props__.__dict__["client_secret_setting_name"] = None
__props__.__dict__["config_version"] = None
__props__.__dict__["default_provider"] = None
__props__.__dict__["enabled"] = None
__props__.__dict__["facebook_app_id"] = None
__props__.__dict__["facebook_app_secret"] = None
__props__.__dict__["facebook_app_secret_setting_name"] = None
__props__.__dict__["facebook_o_auth_scopes"] = None
__props__.__dict__["git_hub_client_id"] = None
__props__.__dict__["git_hub_client_secret"] = None
__props__.__dict__["git_hub_client_secret_setting_name"] = None
__props__.__dict__["git_hub_o_auth_scopes"] = None
__props__.__dict__["google_client_id"] = None
__props__.__dict__["google_client_secret"] = None
__props__.__dict__["google_client_secret_setting_name"] = None
__props__.__dict__["google_o_auth_scopes"] = None
__props__.__dict__["is_auth_from_file"] = None
__props__.__dict__["issuer"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["microsoft_account_client_id"] = None
__props__.__dict__["microsoft_account_client_secret"] = None
__props__.__dict__["microsoft_account_client_secret_setting_name"] = None
__props__.__dict__["microsoft_account_o_auth_scopes"] = None
__props__.__dict__["name"] = None
__props__.__dict__["runtime_version"] = None
__props__.__dict__["token_refresh_extension_hours"] = None
__props__.__dict__["token_store_enabled"] = None
__props__.__dict__["twitter_consumer_key"] = None
__props__.__dict__["twitter_consumer_secret"] = None
__props__.__dict__["twitter_consumer_secret_setting_name"] = None
__props__.__dict__["type"] = None
__props__.__dict__["unauthenticated_client_action"] = None
__props__.__dict__["validate_issuer"] = None
return WebAppAuthSettingsSlot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="aadClaimsAuthorization")
def aad_claims_authorization(self) -> pulumi.Output[Optional[str]]:
"""
Gets a JSON string containing the Azure AD ACL settings.
"""
return pulumi.get(self, "aad_claims_authorization")
@property
@pulumi.getter(name="additionalLoginParams")
def additional_login_params(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
"""
return pulumi.get(self, "additional_login_params")
@property
@pulumi.getter(name="allowedAudiences")
def allowed_audiences(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
"""
return pulumi.get(self, "allowed_audiences")
@property
@pulumi.getter(name="allowedExternalRedirectUrls")
def allowed_external_redirect_urls(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
"""
return pulumi.get(self, "allowed_external_redirect_urls")
@property
@pulumi.getter(name="authFilePath")
def auth_file_path(self) -> pulumi.Output[Optional[str]]:
"""
The path of the config file containing auth settings.
If the path is relative, the base directory is the site's root directory.
"""
return pulumi.get(self, "auth_file_path")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[Optional[str]]:
"""
The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other third-party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_secret")
@property
@pulumi.getter(name="clientSecretCertificateThumbprint")
def client_secret_certificate_thumbprint(self) -> pulumi.Output[Optional[str]]:
"""
An alternative to the client secret: the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the client secret. It is also optional.
"""
return pulumi.get(self, "client_secret_certificate_thumbprint")
@property
@pulumi.getter(name="clientSecretSettingName")
def client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the client secret of the relying party application.
"""
return pulumi.get(self, "client_secret_setting_name")
@property
@pulumi.getter(name="configVersion")
def config_version(self) -> pulumi.Output[Optional[str]]:
"""
The ConfigVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of the control plane for Authentication / Authorization.
"""
return pulumi.get(self, "config_version")
@property
@pulumi.getter(name="defaultProvider")
def default_provider(self) -> pulumi.Output[Optional[str]]:
"""
The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
return pulumi.get(self, "default_provider")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="facebookAppId")
def facebook_app_id(self) -> pulumi.Output[Optional[str]]:
"""
The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_id")
@property
@pulumi.getter(name="facebookAppSecret")
def facebook_app_secret(self) -> pulumi.Output[Optional[str]]:
"""
The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_secret")
@property
@pulumi.getter(name="facebookAppSecretSettingName")
def facebook_app_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the app secret used for Facebook Login.
"""
return pulumi.get(self, "facebook_app_secret_setting_name")
@property
@pulumi.getter(name="facebookOAuthScopes")
def facebook_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_o_auth_scopes")
@property
@pulumi.getter(name="gitHubClientId")
def git_hub_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The Client ID of the GitHub app used for login.
This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_id")
@property
@pulumi.getter(name="gitHubClientSecret")
def git_hub_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The Client Secret of the GitHub app used for GitHub Login.
This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret")
@property
@pulumi.getter(name="gitHubClientSecretSettingName")
def git_hub_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret_setting_name")
@property
@pulumi.getter(name="gitHubOAuthScopes")
def git_hub_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
This setting is optional.
"""
return pulumi.get(self, "git_hub_o_auth_scopes")
@property
@pulumi.getter(name="googleClientId")
def google_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_id")
@property
@pulumi.getter(name="googleClientSecret")
def google_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_secret")
@property
@pulumi.getter(name="googleClientSecretSettingName")
def google_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the client secret associated with
the Google web application.
"""
return pulumi.get(self, "google_client_secret_setting_name")
@property
@pulumi.getter(name="googleOAuthScopes")
def google_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_o_auth_scopes")
@property
@pulumi.getter(name="isAuthFromFile")
def is_auth_from_file(self) -> pulumi.Output[Optional[str]]:
"""
"true" if the auth config settings should be read from a file,
"false" otherwise
"""
return pulumi.get(self, "is_auth_from_file")
@property
@pulumi.getter
def issuer(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
"""
return pulumi.get(self, "issuer")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="microsoftAccountClientId")
def microsoft_account_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_id")
@property
@pulumi.getter(name="microsoftAccountClientSecret")
def microsoft_account_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_secret")
@property
@pulumi.getter(name="microsoftAccountClientSecretSettingName")
def microsoft_account_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
"""
return pulumi.get(self, "microsoft_account_client_secret_setting_name")
@property
@pulumi.getter(name="microsoftAccountOAuthScopes")
def microsoft_account_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
"""
return pulumi.get(self, "microsoft_account_o_auth_scopes")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="runtimeVersion")
def runtime_version(self) -> pulumi.Output[Optional[str]]:
"""
The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
"""
return pulumi.get(self, "runtime_version")
@property
@pulumi.getter(name="tokenRefreshExtensionHours")
def token_refresh_extension_hours(self) -> pulumi.Output[Optional[float]]:
"""
The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
"""
return pulumi.get(self, "token_refresh_extension_hours")
@property
@pulumi.getter(name="tokenStoreEnabled")
def token_store_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
"""
return pulumi.get(self, "token_store_enabled")
@property
@pulumi.getter(name="twitterConsumerKey")
def twitter_consumer_key(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_key")
@property
@pulumi.getter(name="twitterConsumerSecret")
def twitter_consumer_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_secret")
@property
@pulumi.getter(name="twitterConsumerSecretSettingName")
def twitter_consumer_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
"""
return pulumi.get(self, "twitter_consumer_secret_setting_name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="unauthenticatedClientAction")
def unauthenticated_client_action(self) -> pulumi.Output[Optional[str]]:
"""
The action to take when an unauthenticated client attempts to access the app.
"""
return pulumi.get(self, "unauthenticated_client_action")
@property
@pulumi.getter(name="validateIssuer")
def validate_issuer(self) -> pulumi.Output[Optional[bool]]:
"""
Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
return pulumi.get(self, "validate_issuer")
avg_line_length: 57.861569 | max_line_length: 1871 | alphanum_fraction: 0.697047 | [remaining per-file quality-signal columns elided: numeric scores only]
hexsha: 0a5eaa12d00098052b66e44dd09af5f6e2e8cc27 | size: 74340 | ext: py | lang: Python
max_stars_repo_path: backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py | max_stars_repo_name: votti/pipelines | max_stars_repo_head_hexsha: 1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2022-03-30T05:22:19.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-30T05:22:19.000Z
max_issues_repo_path: backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py | max_issues_repo_name: votti/pipelines | max_issues_repo_head_hexsha: 1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2020-02-06T12:53:44.000Z | max_issues_repo_issues_event_max_datetime: 2020-02-06T12:53:44.000Z
max_forks_repo_path: backend/api/python_http_client/kfp_server_api/api/pipeline_service_api.py | max_forks_repo_name: votti/pipelines | max_forks_repo_head_hexsha: 1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# coding: utf-8
"""
Kubeflow Pipelines API
This file contains the REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
Contact: kubeflow-pipelines@google.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kfp_server_api.api_client import ApiClient
from kfp_server_api.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class PipelineServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
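# A minimal, hypothetical usage sketch (the host and pipeline ID below are
# placeholders, not values from this file):
#
#     from kfp_server_api.configuration import Configuration
#     from kfp_server_api.api_client import ApiClient
#     config = Configuration(host="http://localhost:8888")
#     api = PipelineServiceApi(ApiClient(configuration=config))
#     pipeline = api.get_pipeline(id="<pipeline-id>")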
def create_pipeline(self, body, **kwargs): # noqa: E501
"""Creates a pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_pipeline(body, async_req=True)
>>> result = thread.get()
:param body: (required)
:type body: ApiPipeline
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiPipeline
"""
kwargs['_return_http_data_only'] = True
return self.create_pipeline_with_http_info(body, **kwargs) # noqa: E501
def create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501
"""Creates a pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_pipeline_with_http_info(body, async_req=True)
>>> result = thread.get()
:param body: (required)
:type body: ApiPipeline
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: if True, return the response data only,
without the HTTP status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
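# Reject any keyword argument that is not a recognized per-request option.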
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_pipeline" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_pipeline`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
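# The ApiPipeline model in body_params is serialized to JSON by the ApiClient
# using the Content-Type selected below.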
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
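# 'Bearer' names the security scheme from the API spec; when a token is
# configured on the client, ApiClient injects the matching Authorization header.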
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiPipeline', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_pipeline_version(self, body, **kwargs): # noqa: E501
"""Adds a pipeline version to the specified pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_pipeline_version(body, async_req=True)
>>> result = thread.get()
:param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required)
:type body: ApiPipelineVersion
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiPipelineVersion
"""
kwargs['_return_http_data_only'] = True
return self.create_pipeline_version_with_http_info(body, **kwargs) # noqa: E501
def create_pipeline_version_with_http_info(self, body, **kwargs): # noqa: E501
"""Adds a pipeline version to the specified pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_pipeline_version_with_http_info(body, async_req=True)
>>> result = thread.get()
:param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required)
:type body: ApiPipelineVersion
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: if True, return the response data only,
without the HTTP status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiPipelineVersion, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_pipeline_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_pipeline_version`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipeline_versions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiPipelineVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_pipeline(self, id, **kwargs): # noqa: E501
"""Deletes a pipeline and its pipeline versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pipeline(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline to be deleted. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: object
"""
kwargs['_return_http_data_only'] = True
return self.delete_pipeline_with_http_info(id, **kwargs) # noqa: E501
def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes a pipeline and its pipeline versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pipeline_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline to be deleted. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: if True, return the response data only,
without the HTTP status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(object, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_pipeline" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_pipeline`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_pipeline_version(self, version_id, **kwargs): # noqa: E501
"""Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pipeline_version(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version to be deleted. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: object
"""
kwargs['_return_http_data_only'] = True
return self.delete_pipeline_version_with_http_info(version_id, **kwargs) # noqa: E501
def delete_pipeline_version_with_http_info(self, version_id, **kwargs): # noqa: E501
"""Deletes a pipeline version by pipeline version ID. If the deleted pipeline version is the default pipeline version, the pipeline's default version changes to the pipeline's most recent pipeline version. If there are no remaining pipeline versions, the pipeline will have no default version. Examines the run_service_api.ipynb notebook to learn more about creating a run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pipeline_version_with_http_info(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version to be deleted. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: if True, return the response data only,
without the HTTP status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(object, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'version_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_pipeline_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'version_id' is set
if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501
local_var_params['version_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_id` when calling `delete_pipeline_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'version_id' in local_var_params:
path_params['version_id'] = local_var_params['version_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipeline_versions/{version_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_pipeline(self, id, **kwargs): # noqa: E501
"""Finds a specific pipeline by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline to be retrieved. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiPipeline
"""
kwargs['_return_http_data_only'] = True
return self.get_pipeline_with_http_info(id, **kwargs) # noqa: E501
def get_pipeline_with_http_info(self, id, **kwargs): # noqa: E501
"""Finds a specific pipeline by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline to be retrieved. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: if True, return the response data only,
without the HTTP status code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pipeline" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_pipeline`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiPipeline', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
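    # Usage sketch (hypothetical names: `api` is an instance of this generated
    # API class, built from an already-configured ApiClient; host and auth
    # setup are not shown):
    #
    #   pipeline = api.get_pipeline('my-pipeline-id')               # synchronous
    #   thread = api.get_pipeline('my-pipeline-id', async_req=True)
    #   pipeline = thread.get()                                     # blocks here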
def get_pipeline_by_name(self, namespace, name, **kwargs): # noqa: E501
"""Finds a pipeline by Name (and namespace) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_by_name(namespace, name, async_req=True)
>>> result = thread.get()
        :param namespace: The namespace the pipeline belongs to. For shared pipelines and KFPipeline standalone installations, the pipeline name alone is enough for a unique resource lookup (the namespace is not required). In those cases, please provide a hyphen (dash character, \"-\"). (required)
:type namespace: str
:param name: The Name of the pipeline to be retrieved. (required)
:type name: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiPipeline
"""
kwargs['_return_http_data_only'] = True
return self.get_pipeline_by_name_with_http_info(namespace, name, **kwargs) # noqa: E501
def get_pipeline_by_name_with_http_info(self, namespace, name, **kwargs): # noqa: E501
"""Finds a pipeline by Name (and namespace) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_by_name_with_http_info(namespace, name, async_req=True)
>>> result = thread.get()
        :param namespace: The namespace the pipeline belongs to. For shared pipelines and KFPipeline standalone installations, the pipeline name alone is enough for a unique resource lookup (the namespace is not required). In those cases, please provide a hyphen (dash character, \"-\"). (required)
:type namespace: str
:param name: The Name of the pipeline to be retrieved. (required)
:type name: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiPipeline, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'namespace',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pipeline_by_name" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'namespace' is set
if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501
local_var_params['namespace'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `namespace` when calling `get_pipeline_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_pipeline_by_name`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in local_var_params:
path_params['namespace'] = local_var_params['namespace'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/namespaces/{namespace}/pipelines/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiPipeline', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_pipeline_version(self, version_id, **kwargs): # noqa: E501
"""Gets a pipeline version by pipeline version ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_version(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version to be retrieved. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiPipelineVersion
"""
kwargs['_return_http_data_only'] = True
return self.get_pipeline_version_with_http_info(version_id, **kwargs) # noqa: E501
def get_pipeline_version_with_http_info(self, version_id, **kwargs): # noqa: E501
"""Gets a pipeline version by pipeline version ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_version_with_http_info(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version to be retrieved. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiPipelineVersion, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'version_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pipeline_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'version_id' is set
if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501
local_var_params['version_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_id` when calling `get_pipeline_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'version_id' in local_var_params:
path_params['version_id'] = local_var_params['version_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipeline_versions/{version_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiPipelineVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_pipeline_version_template(self, version_id, **kwargs): # noqa: E501
"""Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_version_template(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version whose template is to be retrieved. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiGetTemplateResponse
"""
kwargs['_return_http_data_only'] = True
return self.get_pipeline_version_template_with_http_info(version_id, **kwargs) # noqa: E501
def get_pipeline_version_template_with_http_info(self, version_id, **kwargs): # noqa: E501
"""Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pipeline_version_template_with_http_info(version_id, async_req=True)
>>> result = thread.get()
:param version_id: The ID of the pipeline version whose template is to be retrieved. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiGetTemplateResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'version_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pipeline_version_template" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'version_id' is set
if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501
local_var_params['version_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_id` when calling `get_pipeline_version_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'version_id' in local_var_params:
path_params['version_id'] = local_var_params['version_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipeline_versions/{version_id}/templates', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiGetTemplateResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_template(self, id, **kwargs): # noqa: E501
"""Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline whose template is to be retrieved. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiGetTemplateResponse
"""
kwargs['_return_http_data_only'] = True
return self.get_template_with_http_info(id, **kwargs) # noqa: E501
def get_template_with_http_info(self, id, **kwargs): # noqa: E501
"""Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: The ID of the pipeline whose template is to be retrieved. (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiGetTemplateResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_template" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `get_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines/{id}/templates', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiGetTemplateResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_pipeline_versions(self, **kwargs): # noqa: E501
"""Lists all pipeline versions of a given pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_pipeline_versions(async_req=True)
>>> result = thread.get()
        :param resource_key_type: The type of the resource being referred to.
        :type resource_key_type: str
        :param resource_key_id: The ID of the resource being referred to.
        :type resource_key_id: str
        :param page_size: The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page.
        :type page_size: int
        :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelineVersions call, or can be omitted when fetching the first page.
        :type page_token: str
        :param sort_by: Can be in the form \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default.
        :type sort_by: str
        :param filter: A base64-encoded, JSON-serialized Filter protocol buffer (see filter.proto).
        :type filter: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiListPipelineVersionsResponse
"""
kwargs['_return_http_data_only'] = True
return self.list_pipeline_versions_with_http_info(**kwargs) # noqa: E501
def list_pipeline_versions_with_http_info(self, **kwargs): # noqa: E501
"""Lists all pipeline versions of a given pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_pipeline_versions_with_http_info(async_req=True)
>>> result = thread.get()
        :param resource_key_type: The type of the resource being referred to.
        :type resource_key_type: str
        :param resource_key_id: The ID of the resource being referred to.
        :type resource_key_id: str
        :param page_size: The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page.
        :type page_size: int
        :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelineVersions call, or can be omitted when fetching the first page.
        :type page_token: str
        :param sort_by: Can be in the form \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default.
        :type sort_by: str
        :param filter: A base64-encoded, JSON-serialized Filter protocol buffer (see filter.proto).
        :type filter: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiListPipelineVersionsResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'resource_key_type',
'resource_key_id',
'page_size',
'page_token',
'sort_by',
'filter'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_pipeline_versions" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'resource_key_type' in local_var_params and local_var_params['resource_key_type'] is not None: # noqa: E501
query_params.append(('resource_key.type', local_var_params['resource_key_type'])) # noqa: E501
if 'resource_key_id' in local_var_params and local_var_params['resource_key_id'] is not None: # noqa: E501
query_params.append(('resource_key.id', local_var_params['resource_key_id'])) # noqa: E501
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501
if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501
query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501
if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501
query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipeline_versions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiListPipelineVersionsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
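    # Pagination sketch (hypothetical; `api` is an instance of this class, and
    # the response attribute names follow the generated models, which is an
    # assumption here):
    #
    #   token = ''
    #   while True:
    #       resp = api.list_pipeline_versions(
    #           resource_key_type='PIPELINE',   # KFP resource-type value (assumed)
    #           resource_key_id=pipeline_id,
    #           page_size=50, page_token=token)
    #       for version in (resp.versions or []):
    #           print(version.id, version.name)
    #       token = resp.next_page_token
    #       if not token:
    #           break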
def list_pipelines(self, **kwargs): # noqa: E501
"""Finds all pipelines. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_pipelines(async_req=True)
>>> result = thread.get()
        :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelines call.
        :type page_token: str
        :param page_size: The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field.
        :type page_size: int
        :param sort_by: Can be in the form \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default.
        :type sort_by: str
        :param filter: A URL-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)).
        :type filter: str
        :param resource_reference_key_type: The type of the resource being referred to.
        :type resource_reference_key_type: str
        :param resource_reference_key_id: The ID of the resource being referred to.
        :type resource_reference_key_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ApiListPipelinesResponse
"""
kwargs['_return_http_data_only'] = True
return self.list_pipelines_with_http_info(**kwargs) # noqa: E501
def list_pipelines_with_http_info(self, **kwargs): # noqa: E501
"""Finds all pipelines. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_pipelines_with_http_info(async_req=True)
>>> result = thread.get()
        :param page_token: A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListPipelines call.
        :type page_token: str
        :param page_size: The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field.
        :type page_size: int
        :param sort_by: Can be in the form \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default.
        :type sort_by: str
        :param filter: A URL-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)).
        :type filter: str
        :param resource_reference_key_type: The type of the resource being referred to.
        :type resource_reference_key_type: str
        :param resource_reference_key_id: The ID of the resource being referred to.
        :type resource_reference_key_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ApiListPipelinesResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'page_token',
'page_size',
'sort_by',
'filter',
'resource_reference_key_type',
'resource_reference_key_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_pipelines" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501
query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501
if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501
query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'resource_reference_key_type' in local_var_params and local_var_params['resource_reference_key_type'] is not None: # noqa: E501
query_params.append(('resource_reference_key.type', local_var_params['resource_reference_key_type'])) # noqa: E501
if 'resource_reference_key_id' in local_var_params and local_var_params['resource_reference_key_id'] is not None: # noqa: E501
query_params.append(('resource_reference_key.id', local_var_params['resource_reference_key_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiListPipelinesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_pipeline_default_version(self, pipeline_id, version_id, **kwargs): # noqa: E501
"""Update the default pipeline version of a specific pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pipeline_default_version(pipeline_id, version_id, async_req=True)
>>> result = thread.get()
:param pipeline_id: The ID of the pipeline to be updated. (required)
:type pipeline_id: str
:param version_id: The ID of the default version. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: object
"""
kwargs['_return_http_data_only'] = True
return self.update_pipeline_default_version_with_http_info(pipeline_id, version_id, **kwargs) # noqa: E501
def update_pipeline_default_version_with_http_info(self, pipeline_id, version_id, **kwargs): # noqa: E501
"""Update the default pipeline version of a specific pipeline. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pipeline_default_version_with_http_info(pipeline_id, version_id, async_req=True)
>>> result = thread.get()
:param pipeline_id: The ID of the pipeline to be updated. (required)
:type pipeline_id: str
:param version_id: The ID of the default version. (required)
:type version_id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without the status
                                       code and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If a single
                                 number is provided, it will be the total
                                 request timeout. It can also be a pair (tuple)
                                 of (connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(object, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'pipeline_id',
'version_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_pipeline_default_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pipeline_id' is set
if self.api_client.client_side_validation and ('pipeline_id' not in local_var_params or # noqa: E501
local_var_params['pipeline_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pipeline_id` when calling `update_pipeline_default_version`") # noqa: E501
# verify the required parameter 'version_id' is set
if self.api_client.client_side_validation and ('version_id' not in local_var_params or # noqa: E501
local_var_params['version_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_id` when calling `update_pipeline_default_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pipeline_id' in local_var_params:
path_params['pipeline_id'] = local_var_params['pipeline_id'] # noqa: E501
if 'version_id' in local_var_params:
path_params['version_id'] = local_var_params['version_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
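# Example wiring (a sketch only, not part of the generated client; the
# `kfp_server_api` package name, its Configuration/ApiClient classes, and the
# Bearer-token handling are assumptions about the surrounding package):
#
#   import kfp_server_api
#
#   config = kfp_server_api.Configuration(host='http://localhost:8888')
#   config.api_key['authorization'] = 'Bearer <token>'
#   with kfp_server_api.ApiClient(config) as client:
#       api = PipelineServiceApi(client)
#       listing = api.list_pipelines(page_size=10)
#       first = listing.pipelines[0]
#       tmpl = api.get_template(first.id)
#       # _request_timeout accepts a (connection, read) pair, per the docstrings:
#       api.get_pipeline(first.id, _request_timeout=(3, 10))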
| 46.63739
| 511
| 0.60682
| 8,501
| 74,340
| 5.066816
| 0.035643
| 0.037889
| 0.058505
| 0.030088
| 0.970747
| 0.967218
| 0.960788
| 0.954635
| 0.949226
| 0.940612
| 0
| 0.012036
| 0.321577
| 74,340
| 1,593
| 512
| 46.666667
| 0.842011
| 0.497807
| 0
| 0.74893
| 1
| 0
| 0.184421
| 0.060504
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035663
| false
| 0
| 0.007133
| 0
| 0.078459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0a71a8e35565c0ea30092d91d42ab01065a570dc
| 9,248
|
py
|
Python
|
saltweb/files/getapp3.py
|
AlexandreLoman/jpsaltweb-formula
|
fb02f2dee0d22c3c5890681c24fec571056e7d7f
|
[
"Apache-2.0"
] | null | null | null |
saltweb/files/getapp3.py
|
AlexandreLoman/jpsaltweb-formula
|
fb02f2dee0d22c3c5890681c24fec571056e7d7f
|
[
"Apache-2.0"
] | null | null | null |
saltweb/files/getapp3.py
|
AlexandreLoman/jpsaltweb-formula
|
fb02f2dee0d22c3c5890681c24fec571056e7d7f
|
[
"Apache-2.0"
] | null | null | null |
{% from "saltweb/map.jinja" import saltweb with context %}
#!/usr/bin/env python
import requests
import json
import redis

# Compatibility shim: ``basestring`` exists only on Python 2.
try:
    string_types = basestring  # noqa: F821
except NameError:
    string_types = str
def getInfo():
    """Collect per-application deployment info from the web servers via the
    Salt API (mine.get + jp_api.info) and return it as a JSON string."""
redis_host = '{{ saltweb.app.redis.host }}'
redis_port = {{ saltweb.app.redis.port }}
redis_pswd = {{ saltweb.app.redis.password }}
redis_r = redis.StrictRedis(host=redis_host,
port=redis_port,
password=redis_pswd,
db=0)
rev = None
username = '{{ saltweb.app.salt_api.username }}'
password = '{{ saltweb.app.salt_api.password }}'
proto = '{{ saltweb.app.salt_api.proto }}'
host = '{{ saltweb.app.salt_api.host }}'
port = '{{ saltweb.app.salt_api.port }}'
payload_auth = {'username': username, 'password': password, 'eauth': 'pam'}
headers = {'Accept': 'application/json'}
applications = set()
mineget_srv = '{{ saltweb.app.mineget_srv }}'
payload = {'client': 'local',
'tgt': mineget_srv,
'expr_form': 'compound',
'fun': 'mine.get',
'arg': ['G@applications:* and G@roles:webserver','network.ip_addrs', "compound"],
'username': username,
'password': password,
'eauth': 'pam',
'timeout': '2'}
r = requests.post(proto+'://' + host + ':' + port + '/run',
data=payload,
headers=headers,
verify=False)
srvs = ""
for srv in r.json()['return'][0][mineget_srv]:
srvs += srv+","
payload = {'client': 'local',
'tgt': srvs, #'G@applications:* and G@roles:webserver',
'expr_form': 'list',
'fun': 'jp_api.info',
'username': username,
'password': password,
'eauth': 'pam',
'timeout': '2'}
r = requests.post(proto+'://' + host + ':' + port + '/run',
data=payload,
headers=headers,
verify=False)
rjson = r.json()
# print json.dumps(r.json()['return'][0], indent=2, sort_keys=True)
    for srv in rjson['return'][0]:
        # Minions that failed to respond come back as a plain error string
        # rather than a dict of applications, so skip those.
        if not isinstance(rjson['return'][0][srv], string_types):
            for app in rjson['return'][0][srv]:
                applications.add(app)
                print(app)
result = []
for app in applications:
for srv in rjson['return'][0]:
if app in rjson['return'][0][srv]:
for env in rjson['return'][0][srv][app]:
if len(rjson['return'][0][srv][app][env]) > 0:
for repo in rjson['return'][0][srv][app][env][0]:
try:
rev = redis_r.hget(app,
repo+':'+env+':'+app)
                            except Exception:
                                # Leave ``rev`` unset if the Redis lookup fails.
                                pass
if rjson['return'][0][srv][app][env][0][repo][3]:
extpillar = True
else:
extpillar = False
result.append({'app':app,
'srv':srv,
'env':env,
'repo':repo,
'rev_server':rjson['return'][0][srv][app][env][0][repo][0],
'rev':rev,
'repo_id':rjson['return'][0][srv][app][env][0][repo][2],
'extpillar_used':extpillar,
'target':rjson['return'][0][srv][app][env][0][repo][1].replace("/", ":")[1:]})
rev = ''
# print json.dumps(result, sort_keys=True,
# indent=2, separators=(',', ': '))
# Logout
r = requests.post(proto+'://' + host + ':' + port + '/logout', verify=False)
return json.dumps(result, sort_keys=True,
indent=2, separators=(',', ': '))
def git_rev(app, key, rev):
    """Record the expected git revision for an app/repo key in Redis."""
redis_host = '{{ saltweb.app.redis.host }}'
redis_port = {{ saltweb.app.redis.port }}
redis_pswd = {{ saltweb.app.redis.password }}
redis_r = redis.StrictRedis(host=redis_host,
port=redis_port,
password=redis_pswd,
db=0)
    redis_r.hset(app, key, rev)
    return 'OK'
def delete_repo(app, key):
    """Delete the stored revision entry for an app/repo key from Redis."""
redis_host = '{{ saltweb.app.redis.host }}'
redis_port = {{ saltweb.app.redis.port }}
redis_pswd = {{ saltweb.app.redis.password }}
redis_r = redis.StrictRedis(host=redis_host,
port=redis_port,
password=redis_pswd,
db=0)
    redis_r.hdel(app, key)
    return 'OK'
def repo_update(repo, srv, repo_id):
    """Refresh pillar data on one server, then re-apply the repo and files states."""
username = '{{ saltweb.app.salt_api.username }}'
password = '{{ saltweb.app.salt_api.password }}'
proto = '{{ saltweb.app.salt_api.proto }}'
host = '{{ saltweb.app.salt_api.host }}'
port = '{{ saltweb.app.salt_api.port }}'
payload_auth = {'username': username, 'password': password, 'eauth': 'pam'}
headers = {'Accept': 'application/json'}
applications = set()
payload1 = {'client': 'local',
'tgt': srv,
'fun': 'saltutil.pillar_refresh',
'username': username,
'password': password,
'eauth': 'pam',
'timeout': '2',
'queue': True}
payload2 = {'client': 'local',
'tgt': srv,
'fun': 'state.sls',
'arg': [repo+'_repo'],
'username': username,
'password': password,
'eauth': 'pam',
'timeout': '2',
'queue': True}
payload3 = {'client': 'local',
'tgt': srv,
'fun': 'state.sls',
'arg': ['files'],
'username': username,
'password': password,
'eauth': 'pam',
'timeout': '2',
'queue': True}
    try:
        # Fire the three Salt API calls in order: pillar refresh, the
        # '<repo>_repo' state, then the 'files' state, and log each response.
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload1,
                            headers=headers,
                            verify=False))
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload2,
                            headers=headers,
                            verify=False))
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload3,
                            headers=headers,
                            verify=False))
        requests.post(proto+'://' + host + ':' + port + '/logout', verify=False)
    except Exception:
        # Best effort: callers only need the 'OK' acknowledgement.
        pass
    return 'OK'
def git_update(branch):
    """Refresh pillar data and re-apply the git_repo and files states on every
    minion in the given branch environment."""
username = '{{ saltweb.app.salt_api.username }}'
password = '{{ saltweb.app.salt_api.password }}'
proto = '{{ saltweb.app.salt_api.proto }}'
host = '{{ saltweb.app.salt_api.host }}'
port = '{{ saltweb.app.salt_api.port }}'
payload_auth = {'username': username, 'password': password, 'eauth': 'pam'}
headers = {'Accept': 'application/json'}
applications = set()
payload1 = {'client': 'local',
'tgt': 'G@environment:'+branch,
'tgt_type': 'compound',
'fun': 'saltutil.pillar_refresh',
'username': username,
'password': password,
'eauth': 'pam',
'queue': True}
payload2 = {'client': 'local',
'tgt': 'G@environment:'+branch,
'tgt_type': 'compound',
'fun': 'state.sls',
'arg': ['git_repo'],
'username': username,
'password': password,
'eauth': 'pam',
'queue': True}
payload3 = {'client': 'local',
'tgt': 'G@environment:'+branch,
'tgt_type': 'compound',
'fun': 'state.sls',
'arg': ['files'],
'username': username,
'password': password,
'eauth': 'pam',
'queue': True}
    try:
        # Same three-step sequence as repo_update, but targeted at the whole
        # 'G@environment:<branch>' compound match.
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload1,
                            headers=headers,
                            verify=False))
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload2,
                            headers=headers,
                            verify=False))
        print(requests.post(proto+'://' + host + ':' + port + '/run',
                            data=payload3,
                            headers=headers,
                            verify=False))
        requests.post(proto+'://' + host + ':' + port + '/logout', verify=False)
    except Exception:
        # Best effort: callers only need the 'OK' acknowledgement.
        pass
    return 'OK'
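
# Usage sketch: these helpers are presumably imported by the saltweb
# application (an assumption based on this formula's layout), e.g.:
#
#   from getapp3 import getInfo, git_rev, repo_update
#   print(getInfo())                                # JSON summary of deployed apps
#   git_rev('myapp', 'repo:prod:myapp', 'abc123')   # record an expected revision
#   repo_update('myapp', 'web01', 42)               # re-deploy on one server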
| 37.441296
| 121
| 0.435986
| 832
| 9,248
| 4.764423
| 0.153846
| 0.063068
| 0.052977
| 0.064329
| 0.806004
| 0.793138
| 0.753532
| 0.729314
| 0.665742
| 0.643542
| 0
| 0.008777
| 0.408629
| 9,248
| 246
| 122
| 37.593496
| 0.716036
| 0.032548
| 0
| 0.729858
| 0
| 0
| 0.192794
| 0.060087
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.109005
| 0.028436
| null | null | 0.033175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6a6389f376a662eb05a3801adb116c055758c2d7
| 46,587
|
py
|
Python
|
tensorflow_transform/beam/combiner_packing_util_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_transform/beam/combiner_packing_util_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_transform/beam/combiner_packing_util_test.py
|
Mikehem/tfx
|
e803ea6778d8550ec77dcc92bc8172f1a3a90f38
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_transform.analysis_graph_builder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# GOOGLE-INITIALIZATION
import tensorflow as tf
import tensorflow_transform as tft
from tensorflow_transform import impl_helper
from tensorflow_transform import nodes
from tensorflow_transform.beam import analysis_graph_builder
from tensorflow_transform.beam import combiner_packing_util
from tensorflow_transform import test_case
mock = tf.compat.v1.test.mock
def _preprocessing_fn_with_packable_analyzer_single_phase(inputs):
x, y = inputs['x'], inputs['y']
x_mean = tft.mean(x, name='x')
x_centered = x - x_mean
y_mean = tft.mean(y, name='y')
y_centered = y - y_mean
z = inputs['z']
z_vocab = tft.vocabulary(z, name='z')
initializer = tf.lookup.TextFileInitializer(
z_vocab,
key_dtype=tf.string,
key_index=tf.lookup.TextFileIndex.WHOLE_LINE,
value_dtype=tf.int64,
value_index=tf.lookup.TextFileIndex.LINE_NUMBER)
table = tf.lookup.StaticHashTable(initializer, default_value=-1)
z_integerized = table.lookup(z)
return {'x_centered': x_centered, 'y_centered': y_centered,
'z_integerized': z_integerized}
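# A parameterized test case (presumably consumed by a test harness later in the
# file): the two tft.mean analyzers share a single phase, so the packing pass is
# expected to fold their accumulate/merge steps into shared
# PackedCombineAccumulate/PackedCombineMerge nodes, while the vocabulary
# analyzer is left unpacked. The two dot-graph strings capture the analysis
# graph before and after packing.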
_PACKABLE_ANALYZER_SINGLE_PHASE_CASE = dict(
testcase_name='with_packable_analyzer_single_phase',
feature_spec={
'x': tf.io.FixedLenFeature([], tf.float32),
'y': tf.io.FixedLenFeature([], tf.float32),
'z': tf.io.FixedLenFeature([], tf.string)
},
preprocessing_fn=_preprocessing_fn_with_packable_analyzer_single_phase,
num_phases=1,
expected_dot_graph_str_before_packing=r"""digraph G {
directed=True;
node [shape=Mrecord];
"CreateSavedModelForAnalyzerInputs[Phase0]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('z/Reshape', \"Tensor\<shape: [None], \<dtype: 'string'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase0]}"];
"ExtractInputForSavedModel[FlattenedDataset]" [label="{ExtractInputForSavedModel|dataset_key: DatasetKey(key='FlattenedDataset')|label: ExtractInputForSavedModel[FlattenedDataset]}"];
"ApplySavedModel[Phase0]" [label="{ApplySavedModel|phase: 0|label: ApplySavedModel[Phase0]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase0]" -> "ApplySavedModel[Phase0]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase0]";
"TensorSource[x/mean_and_var]" [label="{ExtractFromDict|keys: ('x/mean_and_var/Cast', 'x/mean_and_var/truediv', 'x/mean_and_var/truediv_1', 'x/mean_and_var/zeros')|label: TensorSource[x/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[x/mean_and_var]";
"CacheableCombineAccumulate[x/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[x/mean_and_var]|partitionable: True}"];
"TensorSource[x/mean_and_var]" -> "CacheableCombineAccumulate[x/mean_and_var]";
"CacheableCombineMerge[x/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[x/mean_and_var]}"];
"CacheableCombineAccumulate[x/mean_and_var]" -> "CacheableCombineMerge[x/mean_and_var]";
"ExtractCombineMergeOutputs[x/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[x/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[x/mean_and_var]" -> "ExtractCombineMergeOutputs[x/mean_and_var]";
"CreateTensorBinding[x/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":0 -> "CreateTensorBinding[x/mean_and_var/Placeholder]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":1 -> "CreateTensorBinding[x/mean_and_var/Placeholder_1]";
"TensorSource[y/mean_and_var]" [label="{ExtractFromDict|keys: ('y/mean_and_var/Cast', 'y/mean_and_var/truediv', 'y/mean_and_var/truediv_1', 'y/mean_and_var/zeros')|label: TensorSource[y/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[y/mean_and_var]";
"CacheableCombineAccumulate[y/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[y/mean_and_var]|partitionable: True}"];
"TensorSource[y/mean_and_var]" -> "CacheableCombineAccumulate[y/mean_and_var]";
"CacheableCombineMerge[y/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[y/mean_and_var]}"];
"CacheableCombineAccumulate[y/mean_and_var]" -> "CacheableCombineMerge[y/mean_and_var]";
"ExtractCombineMergeOutputs[y/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[y/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[y/mean_and_var]" -> "ExtractCombineMergeOutputs[y/mean_and_var]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":0 -> "CreateTensorBinding[y/mean_and_var/Placeholder]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":1 -> "CreateTensorBinding[y/mean_and_var/Placeholder_1]";
"TensorSource[z]" [label="{ExtractFromDict|keys: ('z/Reshape',)|label: TensorSource[z]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[z]";
"VocabularyAccumulate[z]" [label="{VocabularyAccumulate|vocab_ordering_type: 1|input_dtype: string|label: VocabularyAccumulate[z]|partitionable: True}"];
"TensorSource[z]" -> "VocabularyAccumulate[z]";
"VocabularyMerge[z]" [label="{VocabularyMerge|vocab_ordering_type: 1|use_adjusted_mutual_info: False|min_diff_from_avg: None|label: VocabularyMerge[z]}"];
"VocabularyAccumulate[z]" -> "VocabularyMerge[z]";
"VocabularyCount[z]" [label="{VocabularyCount|label: VocabularyCount[z]}"];
"VocabularyMerge[z]" -> "VocabularyCount[z]";
"CreateTensorBinding[z/vocab_z_unpruned_vocab_size]" [label="{CreateTensorBinding|tensor: z/vocab_z_unpruned_vocab_size:0|is_asset_filepath: False|label: CreateTensorBinding[z/vocab_z_unpruned_vocab_size]}"];
"VocabularyCount[z]" -> "CreateTensorBinding[z/vocab_z_unpruned_vocab_size]";
"VocabularyPrune[z]" [label="{VocabularyPrune|top_k: None|frequency_threshold: 0|informativeness_threshold: -inf|coverage_top_k: None|coverage_frequency_threshold: 0|coverage_informativeness_threshold: -inf|key_fn: None|filter_newline_characters: True|label: VocabularyPrune[z]}"];
"VocabularyMerge[z]" -> "VocabularyPrune[z]";
"VocabularyOrderAndWrite[z]" [label="{VocabularyOrderAndWrite|vocab_filename: vocab_z|store_frequency: False|input_dtype: string|label: VocabularyOrderAndWrite[z]|fingerprint_shuffle: False|file_format: text}"];
"VocabularyPrune[z]" -> "VocabularyOrderAndWrite[z]";
"CreateTensorBinding[z/Placeholder]" [label="{CreateTensorBinding|tensor: z/Placeholder:0|is_asset_filepath: True|label: CreateTensorBinding[z/Placeholder]}"];
"VocabularyOrderAndWrite[z]" -> "CreateTensorBinding[z/Placeholder]";
CreateSavedModel [label="{CreateSavedModel|table_initializers: 1|output_signature: OrderedDict([('x_centered', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('y_centered', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('z_integerized', \"Tensor\<shape: [None], \<dtype: 'int64'\>\>\")])|label: CreateSavedModel}"];
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[z/vocab_z_unpruned_vocab_size]" -> CreateSavedModel;
"CreateTensorBinding[z/Placeholder]" -> CreateSavedModel;
}
""",
expected_dot_graph_str_after_packing=r"""digraph G {
directed=True;
node [shape=Mrecord];
"CreateSavedModelForAnalyzerInputs[Phase0]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('z/Reshape', \"Tensor\<shape: [None], \<dtype: 'string'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase0]}"];
"ExtractInputForSavedModel[FlattenedDataset]" [label="{ExtractInputForSavedModel|dataset_key: DatasetKey(key='FlattenedDataset')|label: ExtractInputForSavedModel[FlattenedDataset]}"];
"ApplySavedModel[Phase0]" [label="{ApplySavedModel|phase: 0|label: ApplySavedModel[Phase0]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase0]" -> "ApplySavedModel[Phase0]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase0]";
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" [label="{PackedCombineAccumulate|combiners: [_CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('x/mean_and_var/Cast', 'x/mean_and_var/truediv', 'x/mean_and_var/truediv_1', 'x/mean_and_var/zeros'), label='CacheableCombineAccumulate[x/mean_and_var]'), _CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('y/mean_and_var/Cast', 'y/mean_and_var/truediv', 'y/mean_and_var/truediv_1', 'y/mean_and_var/zeros'), label='CacheableCombineAccumulate[y/mean_and_var]')]|label: PackedCombineAccumulate[ApplySavedModel[Phase0]]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "PackedCombineAccumulate[ApplySavedModel[Phase0]]";
"CacheableCombineAccumulate[x/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[x/mean_and_var]|label: CacheableCombineAccumulate[x/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" -> "CacheableCombineAccumulate[x/mean_and_var]";
"AddKey[CacheableCombineMerge[x/mean_and_var]]" [label="{AddKey|key: CacheableCombineMerge[x/mean_and_var]|label: AddKey[CacheableCombineMerge[x/mean_and_var]]|partitionable: True}"];
"CacheableCombineAccumulate[x/mean_and_var]" -> "AddKey[CacheableCombineMerge[x/mean_and_var]]";
"CacheableCombineAccumulate[y/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[y/mean_and_var]|label: CacheableCombineAccumulate[y/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" -> "CacheableCombineAccumulate[y/mean_and_var]";
"AddKey[CacheableCombineMerge[y/mean_and_var]]" [label="{AddKey|key: CacheableCombineMerge[y/mean_and_var]|label: AddKey[CacheableCombineMerge[y/mean_and_var]]|partitionable: True}"];
"CacheableCombineAccumulate[y/mean_and_var]" -> "AddKey[CacheableCombineMerge[y/mean_and_var]]";
"FlattenInputForPackedCombineMerge[2]" [label="{Flatten|label: FlattenInputForPackedCombineMerge[2]|partitionable: True}"];
"AddKey[CacheableCombineMerge[x/mean_and_var]]" -> "FlattenInputForPackedCombineMerge[2]";
"AddKey[CacheableCombineMerge[y/mean_and_var]]" -> "FlattenInputForPackedCombineMerge[2]";
"PackedCombineMerge[2]" [label="{PackedCombineMerge|combiners: [_CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('CacheableCombineMerge[x/mean_and_var]',), label='CacheableCombineMerge[x/mean_and_var]'), _CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('CacheableCombineMerge[y/mean_and_var]',), label='CacheableCombineMerge[y/mean_and_var]')]|label: PackedCombineMerge[2]}"];
"FlattenInputForPackedCombineMerge[2]" -> "PackedCombineMerge[2]";
"ExtractFromDict[CacheableCombineMerge[y/mean_and_var]]" [label="{ExtractFromDict|keys: CacheableCombineMerge[y/mean_and_var]|label: ExtractFromDict[CacheableCombineMerge[y/mean_and_var]]|partitionable: True}"];
"PackedCombineMerge[2]" -> "ExtractFromDict[CacheableCombineMerge[y/mean_and_var]]";
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[y/mean_and_var]]" [label="{ExtractPackedCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractPackedCombineMergeOutputs[CacheableCombineMerge[y/mean_and_var]]|{<0>0|<1>1}}"];
"ExtractFromDict[CacheableCombineMerge[y/mean_and_var]]" -> "ExtractPackedCombineMergeOutputs[CacheableCombineMerge[y/mean_and_var]]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder]}"];
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[y/mean_and_var]]":0 -> "CreateTensorBinding[y/mean_and_var/Placeholder]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder_1]}"];
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[y/mean_and_var]]":1 -> "CreateTensorBinding[y/mean_and_var/Placeholder_1]";
"TensorSource[z]" [label="{ExtractFromDict|keys: ('z/Reshape',)|label: TensorSource[z]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[z]";
"VocabularyAccumulate[z]" [label="{VocabularyAccumulate|vocab_ordering_type: 1|input_dtype: string|label: VocabularyAccumulate[z]|partitionable: True}"];
"TensorSource[z]" -> "VocabularyAccumulate[z]";
"VocabularyMerge[z]" [label="{VocabularyMerge|vocab_ordering_type: 1|use_adjusted_mutual_info: False|min_diff_from_avg: None|label: VocabularyMerge[z]}"];
"VocabularyAccumulate[z]" -> "VocabularyMerge[z]";
"VocabularyCount[z]" [label="{VocabularyCount|label: VocabularyCount[z]}"];
"VocabularyMerge[z]" -> "VocabularyCount[z]";
"CreateTensorBinding[z/vocab_z_unpruned_vocab_size]" [label="{CreateTensorBinding|tensor: z/vocab_z_unpruned_vocab_size:0|is_asset_filepath: False|label: CreateTensorBinding[z/vocab_z_unpruned_vocab_size]}"];
"VocabularyCount[z]" -> "CreateTensorBinding[z/vocab_z_unpruned_vocab_size]";
"VocabularyPrune[z]" [label="{VocabularyPrune|top_k: None|frequency_threshold: 0|informativeness_threshold: -inf|coverage_top_k: None|coverage_frequency_threshold: 0|coverage_informativeness_threshold: -inf|key_fn: None|filter_newline_characters: True|label: VocabularyPrune[z]}"];
"VocabularyMerge[z]" -> "VocabularyPrune[z]";
"VocabularyOrderAndWrite[z]" [label="{VocabularyOrderAndWrite|vocab_filename: vocab_z|store_frequency: False|input_dtype: string|label: VocabularyOrderAndWrite[z]|fingerprint_shuffle: False|file_format: text}"];
"VocabularyPrune[z]" -> "VocabularyOrderAndWrite[z]";
"CreateTensorBinding[z/Placeholder]" [label="{CreateTensorBinding|tensor: z/Placeholder:0|is_asset_filepath: True|label: CreateTensorBinding[z/Placeholder]}"];
"VocabularyOrderAndWrite[z]" -> "CreateTensorBinding[z/Placeholder]";
"ExtractFromDict[CacheableCombineMerge[x/mean_and_var]]" [label="{ExtractFromDict|keys: CacheableCombineMerge[x/mean_and_var]|label: ExtractFromDict[CacheableCombineMerge[x/mean_and_var]]|partitionable: True}"];
"PackedCombineMerge[2]" -> "ExtractFromDict[CacheableCombineMerge[x/mean_and_var]]";
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[x/mean_and_var]]" [label="{ExtractPackedCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractPackedCombineMergeOutputs[CacheableCombineMerge[x/mean_and_var]]|{<0>0|<1>1}}"];
"ExtractFromDict[CacheableCombineMerge[x/mean_and_var]]" -> "ExtractPackedCombineMergeOutputs[CacheableCombineMerge[x/mean_and_var]]";
"CreateTensorBinding[x/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder]}"];
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[x/mean_and_var]]":0 -> "CreateTensorBinding[x/mean_and_var/Placeholder]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder_1]}"];
"ExtractPackedCombineMergeOutputs[CacheableCombineMerge[x/mean_and_var]]":1 -> "CreateTensorBinding[x/mean_and_var/Placeholder_1]";
CreateSavedModel [label="{CreateSavedModel|table_initializers: 1|output_signature: OrderedDict([('x_centered', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('y_centered', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('z_integerized', \"Tensor\<shape: [None], \<dtype: 'int64'\>\>\")])|label: CreateSavedModel}"];
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[z/vocab_z_unpruned_vocab_size]" -> CreateSavedModel;
"CreateTensorBinding[z/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> CreateSavedModel;
}
""")
def _preprocessing_fn_with_packable_analyzer_two_phases(inputs):
x, y = inputs['x'], inputs['y']
x_mean = tft.mean(x, name='x')
x_square_deviations = tf.square(x - x_mean)
x_var = tft.mean(x_square_deviations, name='x_square_deviations')
x_normalized = (x - x_mean) / tf.sqrt(x_var)
y_mean = tft.mean(y, name='y')
y_square_deviations = tf.square(y - y_mean)
y_var = tft.mean(y_square_deviations, name='y_square_deviations')
y_normalized = (y - y_mean) / tf.sqrt(y_var)
return {'x_normalized': x_normalized, 'y_normalized': y_normalized}
_PACKABLE_ANALYZER_TWO_PHASES_CASE = dict(
testcase_name='with_packable_analyzer_two_phases',
feature_spec={
'x': tf.io.FixedLenFeature([], tf.float32),
'y': tf.io.FixedLenFeature([], tf.float32)
},
preprocessing_fn=_preprocessing_fn_with_packable_analyzer_two_phases,
num_phases=2,
expected_dot_graph_str_before_packing=r"""digraph G {
directed=True;
node [shape=Mrecord];
"CreateSavedModelForAnalyzerInputs[Phase0]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase0]}"];
"ExtractInputForSavedModel[FlattenedDataset]" [label="{ExtractInputForSavedModel|dataset_key: DatasetKey(key='FlattenedDataset')|label: ExtractInputForSavedModel[FlattenedDataset]}"];
"ApplySavedModel[Phase0]" [label="{ApplySavedModel|phase: 0|label: ApplySavedModel[Phase0]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase0]" -> "ApplySavedModel[Phase0]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase0]";
"TensorSource[x/mean_and_var]" [label="{ExtractFromDict|keys: ('x/mean_and_var/Cast', 'x/mean_and_var/truediv', 'x/mean_and_var/truediv_1', 'x/mean_and_var/zeros')|label: TensorSource[x/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[x/mean_and_var]";
"CacheableCombineAccumulate[x/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[x/mean_and_var]|partitionable: True}"];
"TensorSource[x/mean_and_var]" -> "CacheableCombineAccumulate[x/mean_and_var]";
"CacheableCombineMerge[x/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[x/mean_and_var]}"];
"CacheableCombineAccumulate[x/mean_and_var]" -> "CacheableCombineMerge[x/mean_and_var]";
"ExtractCombineMergeOutputs[x/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[x/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[x/mean_and_var]" -> "ExtractCombineMergeOutputs[x/mean_and_var]";
"CreateTensorBinding[x/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":0 -> "CreateTensorBinding[x/mean_and_var/Placeholder]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":1 -> "CreateTensorBinding[x/mean_and_var/Placeholder_1]";
"TensorSource[y/mean_and_var]" [label="{ExtractFromDict|keys: ('y/mean_and_var/Cast', 'y/mean_and_var/truediv', 'y/mean_and_var/truediv_1', 'y/mean_and_var/zeros')|label: TensorSource[y/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "TensorSource[y/mean_and_var]";
"CacheableCombineAccumulate[y/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[y/mean_and_var]|partitionable: True}"];
"TensorSource[y/mean_and_var]" -> "CacheableCombineAccumulate[y/mean_and_var]";
"CacheableCombineMerge[y/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[y/mean_and_var]}"];
"CacheableCombineAccumulate[y/mean_and_var]" -> "CacheableCombineMerge[y/mean_and_var]";
"ExtractCombineMergeOutputs[y/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[y/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[y/mean_and_var]" -> "ExtractCombineMergeOutputs[y/mean_and_var]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":0 -> "CreateTensorBinding[y/mean_and_var/Placeholder]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":1 -> "CreateTensorBinding[y/mean_and_var/Placeholder_1]";
"CreateSavedModelForAnalyzerInputs[Phase1]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x_square_deviations/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase1]}"];
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"ApplySavedModel[Phase1]" [label="{ApplySavedModel|phase: 1|label: ApplySavedModel[Phase1]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase1]" -> "ApplySavedModel[Phase1]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase1]";
"TensorSource[x_square_deviations/mean_and_var]" [label="{ExtractFromDict|keys: ('x_square_deviations/mean_and_var/Cast', 'x_square_deviations/mean_and_var/truediv', 'x_square_deviations/mean_and_var/truediv_1', 'x_square_deviations/mean_and_var/zeros')|label: TensorSource[x_square_deviations/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase1]" -> "TensorSource[x_square_deviations/mean_and_var]";
"CacheableCombineAccumulate[x_square_deviations/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[x_square_deviations/mean_and_var]|partitionable: True}"];
"TensorSource[x_square_deviations/mean_and_var]" -> "CacheableCombineAccumulate[x_square_deviations/mean_and_var]";
"CacheableCombineMerge[x_square_deviations/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[x_square_deviations/mean_and_var]}"];
"CacheableCombineAccumulate[x_square_deviations/mean_and_var]" -> "CacheableCombineMerge[x_square_deviations/mean_and_var]";
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[x_square_deviations/mean_and_var]" -> "ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]";
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x_square_deviations/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]":0 -> "CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]";
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x_square_deviations/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]":1 -> "CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]";
"TensorSource[y_square_deviations/mean_and_var]" [label="{ExtractFromDict|keys: ('y_square_deviations/mean_and_var/Cast', 'y_square_deviations/mean_and_var/truediv', 'y_square_deviations/mean_and_var/truediv_1', 'y_square_deviations/mean_and_var/zeros')|label: TensorSource[y_square_deviations/mean_and_var]|partitionable: True}"];
"ApplySavedModel[Phase1]" -> "TensorSource[y_square_deviations/mean_and_var]";
"CacheableCombineAccumulate[y_square_deviations/mean_and_var]" [label="{CacheableCombineAccumulate|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineAccumulate[y_square_deviations/mean_and_var]|partitionable: True}"];
"TensorSource[y_square_deviations/mean_and_var]" -> "CacheableCombineAccumulate[y_square_deviations/mean_and_var]";
"CacheableCombineMerge[y_square_deviations/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[y_square_deviations/mean_and_var]}"];
"CacheableCombineAccumulate[y_square_deviations/mean_and_var]" -> "CacheableCombineMerge[y_square_deviations/mean_and_var]";
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[y_square_deviations/mean_and_var]" -> "ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]";
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y_square_deviations/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]":0 -> "CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]";
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y_square_deviations/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]":1 -> "CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]";
CreateSavedModel [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x_normalized', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('y_normalized', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\")])|label: CreateSavedModel}"];
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]" -> CreateSavedModel;
}
""",
expected_dot_graph_str_after_packing=r"""digraph G {
directed=True;
node [shape=Mrecord];
"CreateSavedModelForAnalyzerInputs[Phase0]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase0]}"];
"ExtractInputForSavedModel[FlattenedDataset]" [label="{ExtractInputForSavedModel|dataset_key: DatasetKey(key='FlattenedDataset')|label: ExtractInputForSavedModel[FlattenedDataset]}"];
"ApplySavedModel[Phase0]" [label="{ApplySavedModel|phase: 0|label: ApplySavedModel[Phase0]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase0]" -> "ApplySavedModel[Phase0]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase0]";
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" [label="{PackedCombineAccumulate|combiners: [_CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('x/mean_and_var/Cast', 'x/mean_and_var/truediv', 'x/mean_and_var/truediv_1', 'x/mean_and_var/zeros'), label='CacheableCombineAccumulate[x/mean_and_var]'), _CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('y/mean_and_var/Cast', 'y/mean_and_var/truediv', 'y/mean_and_var/truediv_1', 'y/mean_and_var/zeros'), label='CacheableCombineAccumulate[y/mean_and_var]')]|label: PackedCombineAccumulate[ApplySavedModel[Phase0]]|partitionable: True}"];
"ApplySavedModel[Phase0]" -> "PackedCombineAccumulate[ApplySavedModel[Phase0]]";
"CacheableCombineAccumulate[x/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[x/mean_and_var]|label: CacheableCombineAccumulate[x/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" -> "CacheableCombineAccumulate[x/mean_and_var]";
"CacheableCombineMerge[x/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[x/mean_and_var]}"];
"CacheableCombineAccumulate[x/mean_and_var]" -> "CacheableCombineMerge[x/mean_and_var]";
"ExtractCombineMergeOutputs[x/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[x/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[x/mean_and_var]" -> "ExtractCombineMergeOutputs[x/mean_and_var]";
"CreateTensorBinding[x/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":0 -> "CreateTensorBinding[x/mean_and_var/Placeholder]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[x/mean_and_var]":1 -> "CreateTensorBinding[x/mean_and_var/Placeholder_1]";
"CacheableCombineAccumulate[y/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[y/mean_and_var]|label: CacheableCombineAccumulate[y/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase0]]" -> "CacheableCombineAccumulate[y/mean_and_var]";
"CacheableCombineMerge[y/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[y/mean_and_var]}"];
"CacheableCombineAccumulate[y/mean_and_var]" -> "CacheableCombineMerge[y/mean_and_var]";
"ExtractCombineMergeOutputs[y/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[y/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[y/mean_and_var]" -> "ExtractCombineMergeOutputs[y/mean_and_var]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":0 -> "CreateTensorBinding[y/mean_and_var/Placeholder]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[y/mean_and_var]":1 -> "CreateTensorBinding[y/mean_and_var/Placeholder_1]";
"CreateSavedModelForAnalyzerInputs[Phase1]" [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x_square_deviations/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('x_square_deviations/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/Cast', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/truediv', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/truediv_1', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\"), ('y_square_deviations/mean_and_var/zeros', \"Tensor\<shape: [], \<dtype: 'float32'\>\>\")])|label: CreateSavedModelForAnalyzerInputs[Phase1]}"];
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> "CreateSavedModelForAnalyzerInputs[Phase1]";
"ApplySavedModel[Phase1]" [label="{ApplySavedModel|phase: 1|label: ApplySavedModel[Phase1]|partitionable: True}"];
"CreateSavedModelForAnalyzerInputs[Phase1]" -> "ApplySavedModel[Phase1]";
"ExtractInputForSavedModel[FlattenedDataset]" -> "ApplySavedModel[Phase1]";
"PackedCombineAccumulate[ApplySavedModel[Phase1]]" [label="{PackedCombineAccumulate|combiners: [_CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('x_square_deviations/mean_and_var/Cast', 'x_square_deviations/mean_and_var/truediv', 'x_square_deviations/mean_and_var/truediv_1', 'x_square_deviations/mean_and_var/zeros'), label='CacheableCombineAccumulate[x_square_deviations/mean_and_var]'), _CombinerOpWrapper(combiner=\<WeightedMeanAndVarCombiner\>, keys=('y_square_deviations/mean_and_var/Cast', 'y_square_deviations/mean_and_var/truediv', 'y_square_deviations/mean_and_var/truediv_1', 'y_square_deviations/mean_and_var/zeros'), label='CacheableCombineAccumulate[y_square_deviations/mean_and_var]')]|label: PackedCombineAccumulate[ApplySavedModel[Phase1]]|partitionable: True}"];
"ApplySavedModel[Phase1]" -> "PackedCombineAccumulate[ApplySavedModel[Phase1]]";
"CacheableCombineAccumulate[x_square_deviations/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[x_square_deviations/mean_and_var]|label: CacheableCombineAccumulate[x_square_deviations/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase1]]" -> "CacheableCombineAccumulate[x_square_deviations/mean_and_var]";
"CacheableCombineMerge[x_square_deviations/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[x_square_deviations/mean_and_var]}"];
"CacheableCombineAccumulate[x_square_deviations/mean_and_var]" -> "CacheableCombineMerge[x_square_deviations/mean_and_var]";
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[x_square_deviations/mean_and_var]" -> "ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]";
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: x_square_deviations/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]":0 -> "CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]";
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: x_square_deviations/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[x_square_deviations/mean_and_var]":1 -> "CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]";
"CacheableCombineAccumulate[y_square_deviations/mean_and_var]" [label="{ExtractFromDict|keys: CacheableCombineAccumulate[y_square_deviations/mean_and_var]|label: CacheableCombineAccumulate[y_square_deviations/mean_and_var]|partitionable: True}"];
"PackedCombineAccumulate[ApplySavedModel[Phase1]]" -> "CacheableCombineAccumulate[y_square_deviations/mean_and_var]";
"CacheableCombineMerge[y_square_deviations/mean_and_var]" [label="{CacheableCombineMerge|combiner: \<WeightedMeanAndVarCombiner\>|label: CacheableCombineMerge[y_square_deviations/mean_and_var]}"];
"CacheableCombineAccumulate[y_square_deviations/mean_and_var]" -> "CacheableCombineMerge[y_square_deviations/mean_and_var]";
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]" [label="{ExtractCombineMergeOutputs|output_tensor_info_list: [TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None), TensorInfo(dtype=tf.float32, shape=(), temporary_asset_value=None)]|label: ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]|{<0>0|<1>1}}"];
"CacheableCombineMerge[y_square_deviations/mean_and_var]" -> "ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]";
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]" [label="{CreateTensorBinding|tensor: y_square_deviations/mean_and_var/Placeholder:0|is_asset_filepath: False|label: CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]}"];
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]":0 -> "CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]";
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]" [label="{CreateTensorBinding|tensor: y_square_deviations/mean_and_var/Placeholder_1:0|is_asset_filepath: False|label: CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]}"];
"ExtractCombineMergeOutputs[y_square_deviations/mean_and_var]":1 -> "CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]";
CreateSavedModel [label="{CreateSavedModel|table_initializers: 0|output_signature: OrderedDict([('x_normalized', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\"), ('y_normalized', \"Tensor\<shape: [None], \<dtype: 'float32'\>\>\")])|label: CreateSavedModel}"];
"CreateTensorBinding[x/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[x_square_deviations/mean_and_var/Placeholder_1]" -> CreateSavedModel;
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder]" -> CreateSavedModel;
"CreateTensorBinding[y_square_deviations/mean_and_var/Placeholder_1]" -> CreateSavedModel;
}
""")
_COMBINER_PACKING_TEST_CASES = [
_PACKABLE_ANALYZER_SINGLE_PHASE_CASE,
_PACKABLE_ANALYZER_TWO_PHASES_CASE,
]
class CombinerPackingUtilTest(test_case.TransformTestCase):
@test_case.named_parameters(*_COMBINER_PACKING_TEST_CASES)
def test_perform_combiner_packing_optimization(
self, feature_spec, preprocessing_fn, num_phases,
expected_dot_graph_str_before_packing,
expected_dot_graph_str_after_packing):
graph, structured_inputs, structured_outputs = (
impl_helper.trace_preprocessing_function(
preprocessing_fn, feature_spec, use_tf_compat_v1=True))
def _side_effect_fn(saved_model_future, cache_value_nodes,
unused_num_phases):
return (saved_model_future, cache_value_nodes)
with mock.patch.object(
combiner_packing_util,
'perform_combiner_packing_optimization',
side_effect=_side_effect_fn):
transform_fn_future_before, unused_cache = analysis_graph_builder.build(
graph, structured_inputs, structured_outputs)
transform_fn_future_after, unused_cache = (
combiner_packing_util.perform_combiner_packing_optimization(
transform_fn_future_before, unused_cache, num_phases))
dot_string_before = nodes.get_dot_graph(
[transform_fn_future_before]).to_string()
self.assertMultiLineEqual(
msg='Result dot graph is:\n{}'.format(dot_string_before),
first=dot_string_before,
second=expected_dot_graph_str_before_packing)
dot_string_after = nodes.get_dot_graph(
[transform_fn_future_after]).to_string()
self.WriteRenderedDotFile(dot_string_after)
self.assertMultiLineEqual(
msg='Result dot graph is:\n{}'.format(dot_string_after),
first=dot_string_after,
second=expected_dot_graph_str_after_packing)
if __name__ == '__main__':
test_case.main()
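For orientation, a minimal standalone sketch of how the dot strings compared in this test can be produced; it reuses only the helpers the test itself exercises, and the tensorflow_transform module paths in the imports are assumptions, not confirmed by this file:
import tensorflow as tf
import tensorflow_transform as tft
# Assumed module locations for the helpers used in the test above.
from tensorflow_transform import impl_helper, nodes
from tensorflow_transform.beam import analysis_graph_builder

def preprocessing_fn(inputs):
    # A single packable analyzer, analogous to the test cases above.
    return {'x_centered': inputs['x'] - tft.mean(inputs['x'], name='x')}

feature_spec = {'x': tf.io.FixedLenFeature([], tf.float32)}
graph, structured_inputs, structured_outputs = (
    impl_helper.trace_preprocessing_function(
        preprocessing_fn, feature_spec, use_tf_compat_v1=True))
transform_fn_future, _ = analysis_graph_builder.build(
    graph, structured_inputs, structured_outputs)
print(nodes.get_dot_graph([transform_fn_future]).to_string())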
| 116.177057
| 914
| 0.798549
| 5,325
| 46,587
| 6.64939
| 0.049577
| 0.085404
| 0.122006
| 0.045978
| 0.935495
| 0.922927
| 0.912506
| 0.877316
| 0.860935
| 0.849441
| 0
| 0.010926
| 0.047159
| 46,587
| 400
| 915
| 116.4675
| 0.786731
| 0.013888
| 0
| 0.657534
| 0
| 0.290411
| 0.907254
| 0.754056
| 0
| 0
| 0
| 0
| 0.005479
| 1
| 0.010959
| false
| 0
| 0.030137
| 0.00274
| 0.052055
| 0.008219
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0a9e25fd4270c173798eaea9058bbf6f783693b4
| 207
| py
| Python
| src/Application/App.py
| xperemiquel/python-app-exercise
| 4b35139dac029bbf39ea17cd5f33785f014c9ab7
| ["MIT"] | null | null | null
| src/Application/App.py
| xperemiquel/python-app-exercise
| 4b35139dac029bbf39ea17cd5f33785f014c9ab7
| ["MIT"] | null | null | null
| src/Application/App.py
| xperemiquel/python-app-exercise
| 4b35139dac029bbf39ea17cd5f33785f014c9ab7
| ["MIT"] | null | null | null |
from src.Services.TodoService import TodoService
class App:
def __init__(self):
self._todo_service = TodoService()
def todo_service(self) -> TodoService:
return self._todo_service
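A minimal usage sketch for the container above (assuming TodoService, imported from src.Services.TodoService, takes no constructor arguments):
app = App()
service = app.todo_service()  # a plain method, not a property
assert isinstance(service, TodoService)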
| 20.7
| 48
| 0.710145
| 24
| 207
| 5.75
| 0.541667
| 0.23913
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21256
| 207
| 9
| 49
| 23
| 0.846626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 7
| 0abc8a8533cdeda7c795d2871bd044db24ecbce6
| 173
| py
| Python
| website/orders/forms.py
| AntonBalmakov/Shop-Yes-Bouquets
| d0365c2a4a870d80aea65fa75e7a0ef29227d815
| ["MIT"] | null | null | null
| website/orders/forms.py
| AntonBalmakov/Shop-Yes-Bouquets
| d0365c2a4a870d80aea65fa75e7a0ef29227d815
| ["MIT"] | 8
| 2021-03-30T13:26:02.000Z
| 2022-03-12T00:28:14.000Z
| website/orders/forms.py
| AntonBalmakov/Shop-Yes-Bouquets
| d0365c2a4a870d80aea65fa75e7a0ef29227d815
| ["MIT"] | null | null | null |
from django import forms
from .models import *
class CheckoutContactForm(forms.Form):
name = forms.CharField(required=True)
phone = forms.CharField(required=True)
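A short, hypothetical sketch of how this form would typically be used inside a Django view; the field values are made up:
form = CheckoutContactForm(data={'name': 'Jane Doe', 'phone': '+34 600 000 000'})
if form.is_valid():
    name = form.cleaned_data['name']
    phone = form.cleaned_data['phone']
else:
    # Both fields are required=True, so missing input surfaces here.
    errors = form.errors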
| 21.625
| 42
| 0.757225
| 21
| 173
| 6.238095
| 0.619048
| 0.21374
| 0.335878
| 0.396947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150289
| 173
| 7
| 43
| 24.714286
| 0.891156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 7c1f0b388c2015449ec542f360ab879b4972109f
| 223
| py
| Python
| torch/distributed/fsdp/__init__.py
| metacpp/pytorch
| 1e7a4d6bbe1fac4fb94f6b62f24c6e242db1e952
| ["Intel"] | 1
| 2022-03-02T00:28:04.000Z
| 2022-03-02T00:28:04.000Z
| torch/distributed/fsdp/__init__.py
| metacpp/pytorch
| 1e7a4d6bbe1fac4fb94f6b62f24c6e242db1e952
| ["Intel"] | 1
| 2022-03-01T06:10:50.000Z
| 2022-03-01T06:10:50.000Z
| torch/distributed/fsdp/__init__.py
| metacpp/pytorch
| 1e7a4d6bbe1fac4fb94f6b62f24c6e242db1e952
| ["Intel"] | null | null | null |
from .flatten_params_wrapper import FlatParameter
from .fully_sharded_data_parallel import FullyShardedDataParallel
from .fully_sharded_data_parallel import CPUOffload
from .fully_sharded_data_parallel import StateDictType
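A minimal sketch of the API re-exported above, assuming an already-initialized torch.distributed process group and a CUDA device:
import torch.nn as nn
from torch.distributed.fsdp import FullyShardedDataParallel, CPUOffload

model = nn.Linear(8, 8).cuda()
fsdp_model = FullyShardedDataParallel(
    model,
    cpu_offload=CPUOffload(offload_params=True),  # park parameter shards in host RAM
)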
| 44.6
| 65
| 0.910314
| 27
| 223
| 7.111111
| 0.481481
| 0.140625
| 0.25
| 0.3125
| 0.53125
| 0.53125
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071749
| 223
| 4
| 66
| 55.75
| 0.927536
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 7c2db5d9f4271dbe3d99a0b54d24d3a60fb9e9d7
| 16,773
| py
| Python
| test/data/array/test_mask.py
| AshKelly/PyAutoLens
| 043795966338a655339e61782253ad67cc3c14e6
| ["MIT"] | null | null | null
| test/data/array/test_mask.py
| AshKelly/PyAutoLens
| 043795966338a655339e61782253ad67cc3c14e6
| ["MIT"] | null | null | null
| test/data/array/test_mask.py
| AshKelly/PyAutoLens
| 043795966338a655339e61782253ad67cc3c14e6
| ["MIT"] | null | null | null |
import shutil
import os
import numpy as np
import pytest
from autolens.data.array.util import mask_util as util
from autolens.data.array.util import mapping_util
from autolens.data.array import mask as msk
test_data_dir = "{}/../../test_files/array/".format(os.path.dirname(os.path.realpath(__file__)))
class TestMask:
def test__constructor(self):
mask = np.array([[True, True, True, True],
[True, False, False, True],
[True, True, True, True]])
mask = msk.Mask(mask, pixel_scale=1)
assert (mask == np.array([[True, True, True, True],
[True, False, False, True],
[True, True, True, True]])).all()
assert mask.pixel_scale == 1.0
assert mask.central_pixel_coordinates == (1.0, 1.5)
assert mask.shape == (3, 4)
assert mask.shape_arc_seconds == (3.0, 4.0)
def test__array_finalize__masks_pass_attributes(self):
mask = np.array([[True, True, True, True],
[True, False, False, True],
[True, True, True, True]])
mask = msk.Mask(mask, pixel_scale=1)
mask_new = mask + mask
assert mask_new.pixel_scale == 1.0
assert mask_new.origin == (0.0, 0.0)
assert mask_new.centre == (0.0, 0.0)
class TestMaskShapes:
def test__mask_all_unmasked__5x5__input__all_are_false(self):
mask = msk.Mask.unmasked_for_shape_and_pixel_scale(shape=(5, 5), pixel_scale=1.5, invert=False)
assert mask.shape == (5, 5)
assert (mask == np.array([[False, False, False, False, False],
[False, False, False, False, False],
[False, False, False, False, False],
[False, False, False, False, False],
[False, False, False, False, False]])).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (0.0, 0.0)
def test__mask_all_unmasked_inverted__5x5__input__all_are_true(self):
mask = msk.Mask.unmasked_for_shape_and_pixel_scale(shape=(5, 5), pixel_scale=1, invert=True)
assert mask.shape == (5, 5)
assert (mask == np.array([[True, True, True, True, True],
[True, True, True, True, True],
[True, True, True, True, True],
[True, True, True, True, True],
[True, True, True, True, True]])).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (0.0, 0.0)
def test__mask_circular__compare_to_array_util(self):
mask_util = util.mask_circular_from_shape_pixel_scale_and_radius(shape=(5, 4), pixel_scale=2.7,
radius_arcsec=3.5, centre=(1.0, 1.0))
mask = msk.Mask.circular(shape=(5, 4), pixel_scale=2.7, radius_arcsec=3.5, centre=(1.0, 1.0))
assert (mask == mask_util).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_circular__inverted__compare_to_array_util(self):
mask_util = util.mask_circular_from_shape_pixel_scale_and_radius(shape=(5, 4), pixel_scale=2.7,
radius_arcsec=3.5, centre=(1.0, 1.0))
mask = msk.Mask.circular(shape=(5, 4), pixel_scale=2.7, radius_arcsec=3.5, centre=(1.0, 1.0), invert=True)
assert (mask == np.invert(mask_util)).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_annulus__compare_to_array_util(self):
mask_util = util.mask_circular_annular_from_shape_pixel_scale_and_radii(shape=(5, 4), pixel_scale=2.7,
inner_radius_arcsec=0.8,
outer_radius_arcsec=3.5,
centre=(1.0, 1.0))
mask = msk.Mask.circular_annular(shape=(5, 4), pixel_scale=2.7, inner_radius_arcsec=0.8, outer_radius_arcsec=3.5,
centre=(1.0, 1.0))
assert (mask == mask_util).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_annulus_inverted__compare_to_array_util(self):
mask_util = util.mask_circular_annular_from_shape_pixel_scale_and_radii(shape=(5, 4), pixel_scale=2.7,
inner_radius_arcsec=0.8,
outer_radius_arcsec=3.5,
centre=(1.0, 1.0))
mask = msk.Mask.circular_annular(shape=(5, 4), pixel_scale=2.7, inner_radius_arcsec=0.8,
outer_radius_arcsec=3.5,
centre=(1.0, 1.0), invert=True)
assert (mask == np.invert(mask_util)).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_anti_annulus__compare_to_array_util(self):
mask_util = util.mask_circular_anti_annular_from_shape_pixel_scale_and_radii(shape=(9, 9), pixel_scale=1.2,
inner_radius_arcsec=0.8,
outer_radius_arcsec=2.2,
outer_radius_2_arcsec=3.0,
centre=(1.0, 1.0))
mask = msk.Mask.circular_anti_annular(shape=(9, 9), pixel_scale=1.2, inner_radius_arcsec=0.8,
outer_radius_arcsec=2.2, outer_radius_2_arcsec=3.0, centre=(1.0, 1.0))
assert (mask == mask_util).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_anti_annulus_inverted__compare_to_array_util(self):
mask_util = util.mask_circular_anti_annular_from_shape_pixel_scale_and_radii(shape=(9, 9), pixel_scale=1.2,
inner_radius_arcsec=0.8,
outer_radius_arcsec=2.2,
outer_radius_2_arcsec=3.0,
centre=(1.0, 1.0))
mask = msk.Mask.circular_anti_annular(shape=(9, 9), pixel_scale=1.2, inner_radius_arcsec=0.8,
outer_radius_arcsec=2.2, outer_radius_2_arcsec=3.0, centre=(1.0, 1.0),
invert=True)
assert (mask == np.invert(mask_util)).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_elliptical__compare_to_array_util(self):
mask_util = util.mask_elliptical_from_shape_pixel_scale_and_radius(shape=(8, 5), pixel_scale=2.7,
major_axis_radius_arcsec=5.7, axis_ratio=0.4, phi=40.0, centre=(1.0, 1.0))
mask = msk.Mask.elliptical(shape=(8, 5), pixel_scale=2.7,
major_axis_radius_arcsec=5.7, axis_ratio=0.4, phi=40.0, centre=(1.0, 1.0))
assert (mask == mask_util).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_elliptical_inverted__compare_to_array_util(self):
mask_util = util.mask_elliptical_from_shape_pixel_scale_and_radius(shape=(8, 5), pixel_scale=2.7,
major_axis_radius_arcsec=5.7, axis_ratio=0.4, phi=40.0, centre=(1.0, 1.0))
mask = msk.Mask.elliptical(shape=(8, 5), pixel_scale=2.7,
major_axis_radius_arcsec=5.7, axis_ratio=0.4, phi=40.0, centre=(1.0, 1.0), invert=True)
assert (mask == np.invert(mask_util)).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_elliptical_annular__compare_to_array_util(self):
mask_util = util.mask_elliptical_annular_from_shape_pixel_scale_and_radius(shape=(8, 5), pixel_scale=2.7,
inner_major_axis_radius_arcsec=2.1, inner_axis_ratio=0.6, inner_phi=20.0,
outer_major_axis_radius_arcsec=5.7, outer_axis_ratio=0.4, outer_phi=40.0, centre=(1.0, 1.0))
mask = msk.Mask.elliptical_annular(shape=(8, 5), pixel_scale=2.7,
inner_major_axis_radius_arcsec=2.1, inner_axis_ratio=0.6, inner_phi=20.0,
outer_major_axis_radius_arcsec=5.7, outer_axis_ratio=0.4, outer_phi=40.0, centre=(1.0, 1.0))
assert (mask == mask_util).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
def test__mask_elliptical_annular_inverted__compare_to_array_util(self):
mask_util = util.mask_elliptical_annular_from_shape_pixel_scale_and_radius(shape=(8, 5), pixel_scale=2.7,
inner_major_axis_radius_arcsec=2.1, inner_axis_ratio=0.6, inner_phi=20.0,
outer_major_axis_radius_arcsec=5.7, outer_axis_ratio=0.4, outer_phi=40.0, centre=(1.0, 1.0))
mask = msk.Mask.elliptical_annular(shape=(8, 5), pixel_scale=2.7,
inner_major_axis_radius_arcsec=2.1, inner_axis_ratio=0.6, inner_phi=20.0,
outer_major_axis_radius_arcsec=5.7, outer_axis_ratio=0.4, outer_phi=40.0, centre=(1.0, 1.0),
invert=True)
assert (mask == np.invert(mask_util)).all()
assert mask.origin == (0.0, 0.0)
assert mask.centre == (1.0, 1.0)
class TestMaskMappings:
def test__grid_to_pixel__compare_to_array_utill(self):
mask = np.array([[True, True, True],
[True, False, False],
[True, True, False]])
mask = msk.Mask(mask, pixel_scale=7.0)
grid_to_pixel_util = util.masked_grid_1d_index_to_2d_pixel_index_from_mask(mask)
assert mask.masked_grid_index_to_pixel == pytest.approx(grid_to_pixel_util, 1e-4)
def test__map_2d_array_to_masked_1d_array__compare_to_array_util(self):
array_2d = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
[10, 11, 12]])
mask = np.array([[True, False, True],
[False, False, False],
[True, False, True],
[True, True, True]])
array_1d_util = mapping_util.map_2d_array_to_masked_1d_array_from_array_2d_and_mask(mask, array_2d)
mask = msk.Mask(mask, pixel_scale=3.0)
array_1d = mask.map_2d_array_to_masked_1d_array(array_2d)
assert (array_1d == array_1d_util).all()
class TestMaskRegions:
def test__blurring_mask_for_psf_shape__compare_to_array_util(self):
mask = np.array([[True, True, True, True, True, True, True, True],
[True, False, True, True, True, False, True, True],
[True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True],
[True, False, True, True, True, False, True, True],
[True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True]])
blurring_mask_util = util.mask_blurring_from_mask_and_psf_shape(mask=mask, psf_shape=(3, 3))
mask = msk.Mask(mask, pixel_scale=1.0)
blurring_mask = mask.blurring_mask_for_psf_shape(psf_shape=(3, 3))
assert (blurring_mask == blurring_mask_util).all()
def test__edge_image_pixels__compare_to_array_util(self):
mask = np.array([[True, True, True, True, True, True, True],
[True, True, True, True, True, True, True],
[True, True, True, True, True, True, True],
[True, True, True, False, True, True, True],
[True, True, True, True, True, True, True],
[True, True, True, True, True, True, True],
[True, True, True, True, True, True, True]])
edge_pixels_util = util.edge_pixels_from_mask(mask)
mask = msk.Mask(mask, pixel_scale=3.0)
assert mask.edge_pixels == pytest.approx(edge_pixels_util, 1e-4)
def test__border_image_pixels__compare_to_array_util(self):
mask = np.array([[False, False, False, False, False, False, False, True],
[False, True, True, True, True, True, False, True],
[False, True, False, False, False, True, False, True],
[False, True, False, True, False, True, False, True],
[False, True, False, False, False, True, False, True],
[False, True, True, True, True, True, False, True],
[False, False, False, False, False, False, False, True]])
border_pixels_util = util.border_pixels_from_mask(mask)
mask = msk.Mask(mask, pixel_scale=3.0)
assert mask.border_pixels == pytest.approx(border_pixels_util, 1e-4)
class TestMaskExtractor:
def test__mask_extract_region__uses_the_limits_of_the_mask(self):
mask = msk.Mask(array=np.array([[True, True, True, True],
[True, False, False, True],
[True, False, False, True],
[True, True, True, True]]), pixel_scale=1.0)
assert mask.extraction_region == [1,3,1,3]
mask = msk.Mask(array=np.array([[True, True, True, True],
[True, False, False, True],
[True, False, False, False],
[True, True, True, True]]), pixel_scale=1.0)
assert mask.extraction_region == [1,3,1,4]
mask = msk.Mask(array=np.array([[True, True, True, True],
[True, False, False, True],
[True, False, False, True],
[True, True, False, True]]), pixel_scale=1.0)
assert mask.extraction_region == [1,4,1,3]
mask = msk.Mask(array=np.array([[True, True, True, True],
[True, False, False, True],
[False, False, False, True],
[True, True, True, True]]), pixel_scale=1.0)
assert mask.extraction_region == [1,3,0,3]
mask = msk.Mask(array=np.array([[True, False, True, True],
[True, False, False, True],
[True, False, False, True],
[True, True, True, True]]), pixel_scale=1.0)
assert mask.extraction_region == [0,3,1,3]
class TestParse:
def test__load_mask_from_fits__loads_mask(self):
mask = msk.load_mask_from_fits(mask_path=test_data_dir + '3x3_ones.fits', pixel_scale=0.1)
assert (mask == np.ones((3,3))).all()
assert mask.pixel_scale == 0.1
def test__output_mask_to_fits__outputs_mask(self):
mask = msk.load_mask_from_fits(mask_path=test_data_dir + '3x3_ones.fits', pixel_scale=0.1)
output_data_dir = "{}/../../test_files/array/output_test/".format(os.path.dirname(os.path.realpath(__file__)))
if os.path.exists(output_data_dir):
shutil.rmtree(output_data_dir)
os.makedirs(output_data_dir)
msk.output_mask_to_fits(mask=mask, mask_path=output_data_dir + 'mask.fits')
mask = msk.load_mask_from_fits(mask_path=output_data_dir + 'mask.fits', pixel_scale=0.1)
assert (mask == np.ones((3,3))).all()
assert mask.pixel_scale == 0.1
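For reference, a tiny sketch built only from calls exercised in the tests above:
from autolens.data.array import mask as msk

# Same constructor the circular-mask tests use; the values are arbitrary.
mask = msk.Mask.circular(shape=(5, 4), pixel_scale=2.7, radius_arcsec=3.5,
                         centre=(1.0, 1.0))
print(mask.shape, mask.pixel_scale, mask.centre)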
| 47.381356
| 121
| 0.529184
| 2,159
| 16,773
| 3.823529
| 0.056971
| 0.204482
| 0.258752
| 0.290733
| 0.852211
| 0.823864
| 0.806784
| 0.786554
| 0.758449
| 0.741005
| 0
| 0.048888
| 0.353664
| 16,773
| 354
| 122
| 47.381356
| 0.712573
| 0
| 0
| 0.556911
| 0
| 0
| 0.006439
| 0.003815
| 0
| 0
| 0
| 0
| 0.243902
| 1
| 0.089431
| false
| 0.004065
| 0.028455
| 0
| 0.142276
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 7c4e3ecafa082abf120e6e1da63b40ed299946b2
| 25,405
| py
| Python
| pyloxi/loxi/of15/async_config_prop.py
| floodlight/loxigen-artifacts
| 1822ec984cb6da342bbaa381677071cbbe53cee6
| ["Apache-2.0"] | 1
| 2017-06-01T09:41:07.000Z
| 2017-06-01T09:41:07.000Z
| pyloxi/loxi/of15/async_config_prop.py
| floodlight/loxigen-artifacts
| 1822ec984cb6da342bbaa381677071cbbe53cee6
| ["Apache-2.0"] | 2
| 2017-07-03T08:50:56.000Z
| 2018-03-12T16:16:19.000Z
| pyloxi/loxi/of15/async_config_prop.py
| floodlight/loxigen-artifacts
| 1822ec984cb6da342bbaa381677071cbbe53cee6
| ["Apache-2.0"] | 20
| 2015-02-16T15:23:04.000Z
| 2022-03-15T20:06:10.000Z |
# Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
from . import util
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of15']
class async_config_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = async_config_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = async_config_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("async_config_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class cont_status_master(async_config_prop):
type = 15
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = cont_status_master()
_type = reader.read("!H")[0]
assert(_type == 15)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("cont_status_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[15] = cont_status_master
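# --- Editor's sketch, not part of the generated module: a hypothetical
# --- pack/unpack round trip through the subtype dispatch registered above.
# --- Assumes the Python 2 runtime this file targets and that
# --- loxi.generic_util.OFReader is the reader type used throughout pyloxi.
_prop = cont_status_master(mask=0x3)
_buf = _prop.pack()
_decoded = async_config_prop.unpack(loxi.generic_util.OFReader(_buf))
assert isinstance(_decoded, cont_status_master) and _decoded.mask == 0x3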
class cont_status_slave(async_config_prop):
type = 14
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = cont_status_slave()
_type = reader.read("!H")[0]
assert(_type == 14)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("cont_status_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[14] = cont_status_slave
class experimenter_master(async_config_prop):
type = 65535
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = experimenter_master()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("experimenter_master {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
async_config_prop.subtypes[65535] = experimenter_master
class experimenter_slave(async_config_prop):
type = 65534
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = experimenter_slave()
_type = reader.read("!H")[0]
assert(_type == 65534)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("experimenter_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
async_config_prop.subtypes[65534] = experimenter_slave
class flow_removed_master(async_config_prop):
type = 5
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = flow_removed_master()
_type = reader.read("!H")[0]
assert(_type == 5)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("flow_removed_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[5] = flow_removed_master
class flow_removed_slave(async_config_prop):
type = 4
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = flow_removed_slave()
_type = reader.read("!H")[0]
assert(_type == 4)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("flow_removed_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[4] = flow_removed_slave
class flow_stats_master(async_config_prop):
type = 13
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = flow_stats_master()
_type = reader.read("!H")[0]
assert(_type == 13)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("flow_stats_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[13] = flow_stats_master
class flow_stats_slave(async_config_prop):
type = 12
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = flow_stats_slave()
_type = reader.read("!H")[0]
assert(_type == 12)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("flow_stats_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[12] = flow_stats_slave
class packet_in_master(async_config_prop):
type = 1
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = packet_in_master()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("packet_in_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[1] = packet_in_master
class packet_in_slave(async_config_prop):
type = 0
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = packet_in_slave()
_type = reader.read("!H")[0]
assert(_type == 0)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("packet_in_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[0] = packet_in_slave
class port_status_master(async_config_prop):
type = 3
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = port_status_master()
_type = reader.read("!H")[0]
assert(_type == 3)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("port_status_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[3] = port_status_master
class port_status_slave(async_config_prop):
type = 2
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = port_status_slave()
_type = reader.read("!H")[0]
assert(_type == 2)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("port_status_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[2] = port_status_slave
class requestforward_master(async_config_prop):
type = 11
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = requestforward_master()
_type = reader.read("!H")[0]
assert(_type == 11)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("requestforward_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[11] = requestforward_master
class requestforward_slave(async_config_prop):
type = 10
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = requestforward_slave()
_type = reader.read("!H")[0]
assert(_type == 10)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("requestforward_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[10] = requestforward_slave
class role_status_master(async_config_prop):
type = 7
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = role_status_master()
_type = reader.read("!H")[0]
assert(_type == 7)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("role_status_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[7] = role_status_master
class role_status_slave(async_config_prop):
type = 6
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = role_status_slave()
_type = reader.read("!H")[0]
assert(_type == 6)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("role_status_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[6] = role_status_slave
class table_status_master(async_config_prop):
type = 9
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = table_status_master()
_type = reader.read("!H")[0]
assert(_type == 9)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("table_status_master {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[9] = table_status_master
class table_status_slave(async_config_prop):
type = 8
def __init__(self, mask=None):
if mask != None:
self.mask = mask
else:
self.mask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.mask))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = table_status_slave()
_type = reader.read("!H")[0]
assert(_type == 8)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.mask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mask != other.mask: return False
return True
def pretty_print(self, q):
q.text("table_status_slave {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mask = ");
q.text("%#x" % self.mask)
q.breakable()
q.text('}')
async_config_prop.subtypes[8] = table_status_slave
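# --- Illustrative sketch (not part of the generated module above) ---
# Every async_config_prop subtype follows the same TLV pattern: write a
# 2-byte type, reserve a 2-byte length at index 1, append the payload, then
# backfill the length once the total is known. A minimal standalone
# round-trip of that pattern (big-endian, as in OpenFlow), assuming a
# 4-byte mask payload such as cont_status_master (type 15):
import struct

def pack_prop(prop_type, mask):
    packed = [struct.pack("!H", prop_type),
              struct.pack("!H", 0),  # length placeholder at index 1
              struct.pack("!L", mask)]
    packed[1] = struct.pack("!H", sum(len(x) for x in packed))
    return b''.join(packed)

def unpack_prop(buf):
    prop_type, length = struct.unpack_from("!HH", buf, 0)
    (mask,) = struct.unpack_from("!L", buf, 4)
    return prop_type, length, mask

assert unpack_prop(pack_prop(15, 0x3)) == (15, 8, 0x3)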
7c9338f0b893d094ce07f7dd56094d485d1fb3a9 | output/__init__.py | joristork/milovision | Python (.py) | 117 bytes | MIT | 8 stars | 2 forks
#!/usr/bin/env python -tt
# encoding: utf-8
from pipeline_output import Pipeline_Output
from printer import Printer
7cbacc0a533d10de830f58a33c6f026607af6008 | local/tf/models.py | Alicegaz/x-vector-kaldi-tf | Python (.py) | 56,640 bytes | Apache-2.0 | 117 stars | 37 forks
import os
import time
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
import numpy as np
import queue
import tensorflow as tf
import kaldi_io
from tf_block import batch_norm_wrapper, prelu
from ze_utils import set_cuda_visible_devices
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
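# VAR2STD_EPSILON floors the pooled variance so tf.sqrt below never sees a zero input.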
VAR2STD_EPSILON = 0.00001
# noinspection PyAttributeOutsideInit
class Model(object):
def __init__(self):
self.graph = None
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
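        # x-vector-style topology: five frame-level conv layers (layer_sizes/kernel_sizes),
        # statistics pooling, then two segment-level embedding layers.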
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Apply dropout
if i != len(kernel_sizes) - 1:
with tf.name_scope("dropout-%s" % i):
h = tf.nn.dropout(h, self.dropout_keep_prob)
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
if i != len(embedding_sizes) - 1:
with tf.name_scope("dropout-%s" % i):
h = tf.nn.dropout(h, self.dropout_keep_prob)
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
self.loss = tf.reduce_mean(losses, name="loss")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
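            # Batch-norm moving-average updates are collected in UPDATE_OPS; tying them to
            # the optimizer step keeps the inference-time statistics current.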
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
@staticmethod
def save_model(sess, output_dir, logger):
if logger is not None:
logger.info("Start saving graph ...")
saver = tf.train.Saver()
if not os.path.exists(output_dir):
os.makedirs(output_dir)
save_path = saver.save(sess, os.path.join(output_dir, 'model'))
with open(os.path.join(output_dir, 'done'), 'wt') as fid:
fid.write('done')
if logger is not None:
logger.info("Graph saved in path: %s" % save_path)
def load_model(self, sess, input_dir, logger):
if logger is not None:
logger.info("Start loading graph ...")
saver = tf.train.import_meta_graph(os.path.join(input_dir, 'model.meta'))
saver.restore(sess, os.path.join(input_dir, 'model'))
self.graph = sess.graph
self.input_x = self.graph.get_tensor_by_name("input_x:0")
self.input_y = self.graph.get_tensor_by_name("input_y:0")
self.num_classes = self.input_y.shape[1]
self.learning_rate = self.graph.get_tensor_by_name("learning_rate:0")
self.dropout_keep_prob = self.graph.get_tensor_by_name("dropout_keep_prob:0")
self.phase = self.graph.get_tensor_by_name("phase:0")
self.loss = self.graph.get_tensor_by_name("loss:0")
self.optimizer = self.graph.get_operation_by_name("optimizer")
self.accuracy = self.graph.get_tensor_by_name("accuracy/accuracy:0")
self.embedding = [None] * 2 # TODO make this more general
self.embedding[0] = self.graph.get_tensor_by_name("embed_layer-0/scores:0")
self.embedding[1] = self.graph.get_tensor_by_name("embed_layer-1/scores:0")
if logger is not None:
logger.info("Graph restored from path: %s" % input_dir)
def create_one_hot_output_matrix(self, labels):
minibatch_size = len(labels)
one_hot_matrix = np.zeros((minibatch_size, self.num_classes), dtype=np.int32)
for i, lab in enumerate(labels):
one_hot_matrix[i, lab] = 1
return one_hot_matrix
def print_models_params(self, input_dir, logger=None):
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess:
self.load_model(sess, input_dir, logger)
print('\n\nThe components are:\n')
for v in self.graph.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
print(v.name)
print('\n')
def get_models_weights(self, input_dir, logger=None):
import h5py
h5file = os.path.join(input_dir, 'model.h5')
if os.path.exists(h5file):
name2weights = {}
def add2weights(name, mat):
if not isinstance(mat, h5py.Group):
# print('%s shape: %s' % (name, str(mat.shape)))
                    name2weights[name] = mat[()]  # dataset[()] reads the full array (.value was removed in h5py 3)
with h5py.File(h5file, 'r') as hf:
hf.visititems(add2weights)
return name2weights
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess:
self.load_model(sess, input_dir, logger)
name2weights = {}
for v in self.graph.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
name2weights[v.name] = sess.run(v)
print('%s shape: %s' % (v.name, str(name2weights[v.name].shape)))
for i in range(5):
for scope_name in ("frame_level_info_layer-%s" % i, "embed_layer-%s" % i):
for var_name in ("mean", "variance"):
name = '%s/%s:0' % (scope_name, var_name)
try:
name2weights[name] = sess.run(self.graph.get_tensor_by_name(name))
print('%s shape: %s' % (name, str(name2weights[name].shape)))
                        except (KeyError, ValueError):
                            # Not every scope defines these batch-norm moment tensors.
                            pass
with h5py.File(h5file, 'w') as hf:
                for name, mat in name2weights.items():  # dict.iteritems() does not exist on Python 3
hf.create_dataset(name, data=mat.astype(np.float32))
return name2weights
def train_one_iteration(self, data_loader, args, logger):
learning_rate = args.learning_rate
print_interval = args.print_interval
dropout_proportion = args.dropout_proportion
input_dir = args.input_dir
output_dir = args.output_dir
random_seed = 4 * args.random_seed + args.random_seed % 3
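        # Derive a per-iteration graph seed from the configured one; a value of 0 leaves TF unseeded below.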
set_cuda_visible_devices(use_gpu=True, logger=logger)
with tf.Session(config=tf.ConfigProto(allow_soft_placement=False, log_device_placement=False)) as sess:
if random_seed != 0:
tf.set_random_seed(random_seed)
self.load_model(sess, input_dir, logger)
# Shuffle the data in each epoch
minibatch_count = data_loader.count
start_minibatch = 1
dropout_keep_prob = 1 - dropout_proportion
total_segments, minibatch_segments = 0, 0
total_loss, minibatch_loss = 0, 0
total_objective, minibatch_objective = 0, 0
total_accuracy, minibatch_accuracy = 0, 0
total_segments_len = 0
total_gpu_waiting = 0.0
total_disk_waiting = 0.0
start_time = time.time()
for minibatch_idx in range(minibatch_count):
try:
disk_waiting = time.time()
batch_data, labels = data_loader.pop()
total_disk_waiting += time.time() - disk_waiting
except queue.Empty:
                    logger.warning('Timeout reached while reading the minibatch index %d' % minibatch_idx)
continue
if batch_data is None:
logger.warning('batch_data is None for the minibatch index %d' % minibatch_idx)
continue
batch_labels = self.create_one_hot_output_matrix(labels)
minibatch_segments += batch_data.shape[0]
total_segments += batch_data.shape[0]
total_segments_len += batch_data.shape[1]
feed_dict = {self.input_x: batch_data, self.input_y: batch_labels,
self.dropout_keep_prob: dropout_keep_prob, self.learning_rate: learning_rate,
self.phase: True}
gpu_waiting = time.time()
_, loss, accuracy = sess.run([self.optimizer, self.loss, self.accuracy], feed_dict=feed_dict)
total_gpu_waiting += time.time() - gpu_waiting
objective = -loss
total_loss += loss
minibatch_loss += loss
total_objective += objective
minibatch_objective += objective
total_accuracy += accuracy
minibatch_accuracy += accuracy
end_minibatch = minibatch_idx + 1
if end_minibatch % print_interval == 0:
cnt = end_minibatch - start_minibatch + 1
logger.info("Average training loss for minibatches %d-%d is %.4f over %d segments. Also, the "
"average training accuracy for these minibatches is %.4f and the average "
"objective function for these minibatches is %.4f. Average DISK waiting: %.1f "
"secs and average GPU waiting: %.1f secs for each minibatch." %
(start_minibatch, end_minibatch, minibatch_loss / cnt,
minibatch_segments, minibatch_accuracy / cnt, minibatch_objective / cnt,
total_disk_waiting / cnt, total_gpu_waiting / cnt))
start_minibatch = end_minibatch + 1
minibatch_segments = 0
minibatch_loss = 0
minibatch_accuracy = 0
minibatch_objective = 0
total_gpu_waiting = 0.0
total_disk_waiting = 0.0
logger.info("Processed %d segments of average size %d into %d minibatches. Avg minibatch size was %d." %
(total_segments, total_segments_len / minibatch_count, minibatch_count,
total_segments / minibatch_count))
logger.info("Overall average training loss is %.4f over %d segments. Also, the overall "
"average training accuracy is %.4f." % (total_loss / minibatch_count,
total_segments, total_accuracy / minibatch_count))
logger.info("Overall average objective function is %.4f over %d segments." %
(total_objective / minibatch_count, total_segments))
Model.save_model(sess, output_dir, logger)
logger.info("Elapsed time for processing whole training minibatches is %.2f minutes." %
((time.time() - start_time) / 60.0))
def eval(self, data_loader, input_dir, use_gpu, logger):
set_cuda_visible_devices(use_gpu=use_gpu, logger=logger)
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess:
self.load_model(sess, input_dir, logger)
# Shuffle the data in each epoch
minibatch_count = data_loader.count
total_segments = 0
total_loss = 0
total_accuracy = 0
total_segments_len = 0
total_gpu_waiting = 0.0
total_disk_waiting = 0.0
start_time = time.time()
for minibatch_idx in range(minibatch_count):
try:
disk_waiting = time.time()
batch_data, labels = data_loader.pop()
total_disk_waiting += time.time() - disk_waiting
except queue.Empty:
                    logger.warning('Timeout reached while reading minibatch index %d' % minibatch_idx)
continue
if batch_data is None:
logger.warning('batch_data is None for minibatch index %d' % minibatch_idx)
continue
batch_labels = self.create_one_hot_output_matrix(labels)
total_segments += batch_data.shape[0]
total_segments_len += batch_data.shape[1]
feed_dict = {self.input_x: batch_data, self.input_y: batch_labels, self.dropout_keep_prob: 1.0,
self.phase: False}
gpu_waiting = time.time()
loss, accuracy = sess.run([self.loss, self.accuracy], feed_dict=feed_dict)
total_gpu_waiting += time.time() - gpu_waiting
total_loss += loss
total_accuracy += accuracy
logger.info("Processed %d segments of average size %d into %d minibatches. Avg minibatch size was %d." %
(total_segments, total_segments_len / minibatch_count, minibatch_count,
total_segments / minibatch_count))
logger.info("Overall average loss is %.4f over %d segments. Also, the overall "
"average accuracy is %.4f." % (total_loss / minibatch_count, total_segments,
total_accuracy / minibatch_count))
logger.info("Elapsed time for processing whole training minibatches is %.2f minutes." %
((time.time() - start_time) / 60.0))
def make_embedding(self, input_stream, output_stream, model_dir, min_chunk_size, chunk_size, use_gpu, logger):
start_time = time.time()
set_cuda_visible_devices(use_gpu=use_gpu, logger=logger)
config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
if not use_gpu:
config.intra_op_parallelism_threads = 2
config.inter_op_parallelism_threads = 2
with tf.Session(config=config) as sess:
self.load_model(sess, model_dir, logger)
total_segments = 0
total_segments_len = 0
total_gpu_waiting = 0.0
num_fail = 0
num_success = 0
for key, mat in kaldi_io.read_mat_ark(input_stream):
logger.info("Processing features with key '%s' which have shape '%s'" % (key, str(mat.shape)))
total_segments += 1
num_rows = mat.shape[0]
if num_rows == 0:
logger.warning("Zero-length utterance: '%s'" % key)
num_fail += 1
continue
if num_rows < min_chunk_size:
logger.warning("Minimum chunk size of %d is greater than the number of rows in utterance: %s" %
(min_chunk_size, key))
num_fail += 1
continue
this_chunk_size = chunk_size
if num_rows < chunk_size:
logger.info("Chunk size of %d is greater than the number of rows in utterance: %s, "
"using chunk size of %d" % (chunk_size, key, num_rows))
this_chunk_size = num_rows
elif chunk_size == -1:
this_chunk_size = num_rows
num_chunks = int(np.ceil(num_rows / float(this_chunk_size)))
# logger.info("num_chunks: %d" % num_chunks)
xvector_avg = 0
tot_weight = 0.0
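                # Accumulate a frame-count-weighted average of the per-chunk x-vectors.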
for chunk_idx in range(num_chunks):
# If we're nearing the end of the input, we may need to shift the
# offset back so that we can get this_chunk_size frames of input to
# the nnet.
offset = min(this_chunk_size, num_rows - chunk_idx * this_chunk_size)
if offset < min_chunk_size:
continue
# logger.info("offset: %d" % offset)
sub_mat = mat[chunk_idx * this_chunk_size: chunk_idx * this_chunk_size + offset, :]
data = np.reshape(sub_mat, (1, sub_mat.shape[0], sub_mat.shape[1]))
total_segments_len += sub_mat.shape[0]
feed_dict = {self.input_x: data, self.dropout_keep_prob: 1.0, self.phase: False}
gpu_waiting = time.time()
xvector = sess.run(self.embedding[0], feed_dict=feed_dict)
xvector = xvector[0]
# logger.info("xvector: %s" % str(xvector.shape))
total_gpu_waiting += time.time() - gpu_waiting
tot_weight += offset
xvector_avg += offset * xvector
xvector_avg /= tot_weight
kaldi_io.write_vec_flt(output_stream, xvector_avg, key=key)
num_success += 1
logger.info("Processed %d features of average size %d frames. Done %d and failed %d" %
(total_segments, total_segments_len / total_segments, num_success, num_fail))
logger.info("Total time for neural network computations is %.2f minutes." %
(total_gpu_waiting / 60.0))
logger.info("Elapsed time for extracting whole embeddings is %.2f minutes." %
((time.time() - start_time) / 60.0))
# noinspection PyAttributeOutsideInit
class ModelWithoutDropout(Model):
def __init__(self):
super(ModelWithoutDropout, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
self.loss = tf.reduce_mean(losses, name="loss")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelWithoutDropoutTdnn(Model):
def __init__(self):
super(ModelWithoutDropoutTdnn, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 3, 3, 1, 1]
embedding_sizes = [512, 512]
dilation_rates = [1, 2, 3, 1, 1]
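        # Dilation rates widen the temporal context of the 1-D convolutions, mimicking TDNN frame splicing.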
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.convolution(h, w, dilation_rate=[dilation_rates[i]], padding="SAME",
name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
self.loss = tf.reduce_mean(losses, name="loss")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelWithoutDropoutPRelu(Model):
def __init__(self):
super(ModelWithoutDropoutPRelu, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = prelu(h, shared=False)
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
h = prelu(h, shared=False)
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
self.loss = tf.reduce_mean(losses, name="loss")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelL2LossWithoutDropoutPRelu(Model):
def __init__(self):
super(ModelL2LossWithoutDropoutPRelu, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
beta = 0.0002
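        # beta scales the L2 weight penalty that is added to the cross-entropy loss below.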
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
l2_loss = tf.constant(0.0)
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = prelu(h, shared=False)
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
# Apply L2 loss
if i == 0:
l2_loss += 0.1 * tf.nn.l2_loss(w)
l2_loss += 0.1 * tf.nn.l2_loss(b)
else:
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
h = prelu(h, shared=False)
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
# Apply L2 loss
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
# Normal loss function
loss = tf.reduce_mean(losses, name="orig_loss")
self.loss = tf.reduce_mean(loss + beta * l2_loss, name='loss')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelL2LossWithoutDropoutLRelu(Model):
def __init__(self):
super(ModelL2LossWithoutDropoutLRelu, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
beta = 0.0002
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
l2_loss = tf.constant(0.0)
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
h = tf.nn.leaky_relu(h, alpha=0.2, name='lrelu')
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
# Apply L2 loss
if i == 0:
l2_loss += 0.1 * tf.nn.l2_loss(w)
l2_loss += 0.1 * tf.nn.l2_loss(b)
else:
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
h = tf.nn.leaky_relu(h, alpha=0.2, name='lrelu')
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
# Apply L2 loss
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
# Normal loss function
loss = tf.reduce_mean(losses, name="orig_loss")
self.loss = tf.reduce_mean(loss + beta * l2_loss, name='loss')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelL2LossWithoutDropoutLReluAttention(Model):
def __init__(self):
super(ModelL2LossWithoutDropoutLReluAttention, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 6 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
beta = 0.0002
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
l2_loss = tf.constant(0.0)
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[layer_size]), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = tf.nn.leaky_relu(h, alpha=0.2, name='lrelu')
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
            prev_dim //= 2  # h is split in half for attention below; floor division keeps an int for shapes
# apply self attention
with tf.variable_scope("attention"):
b = tf.Variable(tf.constant(0.1, shape=[prev_dim]), name="b")
v = tf.Variable(tf.constant(0.1, shape=[prev_dim]), name="v")
# Note: the dimension of the w needs more experiments, here we simply use a square matrix
w = tf.Variable(tf.truncated_normal([prev_dim, prev_dim], stddev=0.1), name="w")
h1, h2 = tf.split(h, 2, axis=2)
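                # h1 drives the attention weights; h2 supplies the values whose weighted statistics are pooled.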
non_linearity = tf.nn.tanh(tf.nn.bias_add(tf.einsum('ijk,kl->ijl', h1, w), b), name="non_linearity")
attention = tf.nn.softmax(tf.einsum('ijk,k->ij', non_linearity, v), name="attention")
h_m = tf.einsum('ijk,ij->ik', h2, attention)
h_s = tf.subtract(tf.einsum('ijk,ij->ik', tf.square(h2), attention), tf.square(h_m), name="stats_var")
h = tf.concat([h_m, tf.sqrt(h_s + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=0.1), name="w")
b = tf.Variable(tf.constant(0.1, shape=[out_dim]), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
# Apply L2 loss
if i == 0:
l2_loss += 0.1 * tf.nn.l2_loss(w)
l2_loss += 0.1 * tf.nn.l2_loss(b)
else:
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
h = tf.nn.leaky_relu(h, alpha=0.2, name='lrelu')
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
w = tf.get_variable("w", shape=[prev_dim, num_classes],
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b")
# Apply L2 loss
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
# Normal loss function
loss = tf.reduce_mean(losses, name="orig_loss")
self.loss = tf.reduce_mean(loss + beta * l2_loss, name='loss')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
# noinspection PyAttributeOutsideInit
class ModelL2LossWithoutDropoutReluHeInit(Model):
def __init__(self):
super(ModelL2LossWithoutDropoutReluHeInit, self).__init__()
def build_model(self, num_classes, input_feature_dim, output_dir, logger=None):
layer_sizes = [512, 512, 512, 512, 3 * 512]
kernel_sizes = [5, 5, 7, 1, 1]
embedding_sizes = [512, 512]
beta = 0.0002
if logger is not None:
logger.info("Start building the model ...")
tf.reset_default_graph()
self.graph = tf.Graph()
with self.graph.as_default():
self.num_classes = num_classes
# placeholder for parameter
self.learning_rate = tf.placeholder(tf.float32, name="learning_rate")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.phase = tf.placeholder(tf.bool, name="phase")
# Placeholders for regular data
self.input_x = tf.placeholder(tf.float32, [None, None, input_feature_dim], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
l2_loss = tf.constant(0.0)
h = self.input_x
# Frame level information Layer
prev_dim = input_feature_dim
for i, (kernel_size, layer_size) in enumerate(zip(kernel_sizes, layer_sizes)):
with tf.variable_scope("frame_level_info_layer-%s" % i):
kernel_shape = [kernel_size, prev_dim, layer_size]
# he_normal init
fan_in = kernel_size * prev_dim
w = tf.Variable(tf.truncated_normal(kernel_shape, stddev=tf.sqrt(2.0 / fan_in)), name="w")
# he_uniform init
limit = tf.sqrt(6.0 / fan_in)
b = tf.Variable(tf.random_uniform([layer_size], minval=-limit, maxval=limit), name="b")
conv = tf.nn.conv1d(h, w, stride=1, padding="SAME", name="conv-layer-%s" % i)
h = tf.nn.bias_add(conv, b)
# Apply nonlinearity and BN
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = layer_size
# Statistic pooling
tf_mean, tf_var = tf.nn.moments(h, 1)
h = tf.concat([tf_mean, tf.sqrt(tf_var + VAR2STD_EPSILON)], 1)
prev_dim = prev_dim * 2
# Embedding layers
for i, out_dim in enumerate(embedding_sizes):
with tf.variable_scope("embed_layer-%s" % i):
# he_normal init
w = tf.Variable(tf.truncated_normal([prev_dim, out_dim], stddev=tf.sqrt(2.0 / prev_dim)), name="w")
# he_uniform init
limit = tf.sqrt(6.0 / prev_dim)
b = tf.Variable(tf.random_uniform([out_dim], minval=-limit, maxval=limit), name="b")
h = tf.nn.xw_plus_b(h, w, b, name="scores")
# Apply L2 loss
if i == 0:
l2_loss += 0.1 * tf.nn.l2_loss(w)
l2_loss += 0.1 * tf.nn.l2_loss(b)
else:
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
h = tf.nn.relu(h, name="relu")
h = batch_norm_wrapper(h, decay=0.95, is_training=self.phase)
prev_dim = out_dim
# Softmax
with tf.variable_scope("output"):
# glorot_normal: https://keras.io/initializers/
stddev = tf.sqrt(2.0 / (prev_dim + num_classes))
w = tf.Variable(tf.truncated_normal([prev_dim, num_classes], stddev=stddev), name="w")
# glorot_uniform
limit = tf.sqrt(6.0 / (prev_dim + num_classes))
b = tf.Variable(tf.random_uniform([num_classes], minval=-limit, maxval=limit), name="b")
# Apply L2 loss
l2_loss += tf.nn.l2_loss(w)
l2_loss += tf.nn.l2_loss(b)
scores = tf.nn.xw_plus_b(h, w, b, name="scores")
predictions = tf.argmax(scores, 1, name="predictions")
losses = tf.nn.softmax_cross_entropy_with_logits(logits=scores, labels=self.input_y)
# Normal loss function
loss = tf.reduce_mean(losses, name="orig_loss")
self.loss = tf.reduce_mean(loss + beta * l2_loss, name='loss')
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss,
name="optimizer")
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
set_cuda_visible_devices(use_gpu=False, logger=logger)
with tf.Session(graph=self.graph, config=tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)) as sess:
if logger is not None:
logger.info("Start initializing the graph ...")
sess.run(tf.global_variables_initializer())
Model.save_model(sess, output_dir, logger)
if logger is not None:
logger.info("Building finished.")
| 45.493976
| 119
| 0.562129
| 6,929
| 56,640
| 4.374224
| 0.061192
| 0.011614
| 0.017421
| 0.023227
| 0.821703
| 0.800026
| 0.781946
| 0.773005
| 0.766307
| 0.76182
| 0
| 0.020317
| 0.332627
| 56,640
| 1,244
| 120
| 45.530547
| 0.781508
| 0.04066
| 0
| 0.739181
| 0
| 0.002339
| 0.076617
| 0.004959
| 0
| 0
| 0
| 0.000804
| 0
| 1
| 0.02924
| false
| 0.00117
| 0.012866
| 0
| 0.054971
| 0.009357
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b220786102a0afd32c92703d4abcada2d945db2
| 7,886
|
py
|
Python
|
dev/services/wms/ows_refactored/baseline_satellite_data/sentinel2/ows_nrt_cfg.py
|
nf-s/dea-config
|
e0fe5fde8a99ad29472c5d14531ecc9208578040
|
[
"Apache-2.0"
] | 14
|
2018-08-20T00:31:50.000Z
|
2022-02-04T08:13:06.000Z
|
dev/services/wms/ows_refactored/baseline_satellite_data/sentinel2/ows_nrt_cfg.py
|
nf-s/dea-config
|
e0fe5fde8a99ad29472c5d14531ecc9208578040
|
[
"Apache-2.0"
] | 145
|
2018-06-04T05:06:14.000Z
|
2022-02-22T23:02:00.000Z
|
dev/services/wms/ows_refactored/baseline_satellite_data/sentinel2/ows_nrt_cfg.py
|
nf-s/dea-config
|
e0fe5fde8a99ad29472c5d14531ecc9208578040
|
[
"Apache-2.0"
] | 35
|
2018-06-04T05:04:15.000Z
|
2022-01-25T07:48:07.000Z
|
from ows_refactored.baseline_satellite_data.sentinel2.band_s2_cfg import \
bands_sentinel2
from ows_refactored.baseline_satellite_data.sentinel2.style_s2_cfg import \
styles_s2_list
from ows_refactored.ows_reslim_cfg import reslim_wms_min_zoom_lvl_7
combined_layer = {
"title": "DEA Surface Reflectance (Sentinel-2 Near Real-Time)",
"name": "s2_nrt_granule_nbar_t",
"abstract": """Sentinel-2 Multispectral Instrument - Nadir BRDF Adjusted Reflectance + Terrain Illumination Correction near real time (Sentinel-2 MSI)
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
https://cmi.ga.gov.au/data-products/dea/190/dea-surface-reflectance-nbart-sentinel-2-msi
For service status information, see https://status.dea.ga.gov.au""",
"multi_product": True,
"product_names": ["s2a_nrt_granule", "s2b_nrt_granule"],
"bands": bands_sentinel2,
"resource_limits": reslim_wms_min_zoom_lvl_7,
"dynamic": True,
"native_crs": "EPSG:3577",
"native_resolution": [10.0, 10.0],
"image_processing": {
"extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
"always_fetch_bands": [],
"manual_merge": False,
},
"flags": [
{
"band": "fmask",
"products": ["s2a_nrt_granule", "s2b_nrt_granule"],
"ignore_time": False,
"ignore_info_flags": [],
},
{
"band": "land",
"products": ["geodata_coast_100k", "geodata_coast_100k"],
"ignore_time": True,
"ignore_info_flags": []
},
],
"styling": {"default_style": "simple_rgb", "styles": styles_s2_list},
}
s2b_layer = {
"name": "s2b_nrt_granule_nbar_t",
"title": "DEA Surface Reflectance (Sentinel-2B MSI Near Real-Time)",
"abstract": """Sentinel-2 Multispectral Instrument - Nadir BRDF Adjusted Reflectance + Terrain Illumination Correction (Sentinel-2B MSI) near real time
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
https://cmi.ga.gov.au/data-products/dea/190/dea-surface-reflectance-nbart-sentinel-2-msi
For service status information, see https://status.dea.ga.gov.au
""",
"product_name": "s2b_nrt_granule",
"bands": bands_sentinel2,
"resource_limits": reslim_wms_min_zoom_lvl_7,
"dynamic": True,
"native_crs": "EPSG:3577",
"native_resolution": [10.0, 10.0],
"image_processing": {
"extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
"always_fetch_bands": [],
"manual_merge": False,
},
"flags": [
{
"band": "fmask",
"product": "s2b_nrt_granule",
"ignore_time": False,
"ignore_info_flags": []
},
{
"band": "land",
"product": "geodata_coast_100k",
"ignore_time": True,
"ignore_info_flags": []
},
],
"styling": {"default_style": "simple_rgb", "styles": styles_s2_list},
}
s2a_layer = {
"name": "s2a_nrt_granule_nbar_t",
"title": "DEA Surface Reflectance (Sentinel-2A MSI Near Real-Time)",
"abstract": """Sentinel-2 Multispectral Instrument - Nadir BRDF Adjusted Reflectance + Terrain Illumination Correction (Sentinel-2A MSI) near real time
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
https://cmi.ga.gov.au/data-products/dea/190/dea-surface-reflectance-nbart-sentinel-2-msi
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
For service status information, see https://status.dea.ga.gov.au""",
"product_name": "s2a_nrt_granule",
"bands": bands_sentinel2,
"resource_limits": reslim_wms_min_zoom_lvl_7,
"dynamic": True,
"native_crs": "EPSG:3577",
"native_resolution": [10.0, 10.0],
"image_processing": {
"extent_mask_func": "datacube_ows.ogc_utils.mask_by_val",
"always_fetch_bands": [],
"manual_merge": False,
},
"flags": [
{
"band": "fmask",
"product": "s2a_nrt_granule",
"ignore_time": False,
"ignore_info_flags": []
},
{
"band": "land",
"product": "geodata_coast_100k",
"ignore_time": True,
"ignore_info_flags": []
},
],
"styling": {"default_style": "simple_rgb", "styles": styles_s2_list},
}
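The layer abstracts above reference the NDCI of Mishra & Mishra (2012). For illustration, the index is a normalised difference of the red-edge and red reflectances; on Sentinel-2 these are commonly bands B05 and B04 (an assumption here — the authoritative band aliases live in bands_sentinel2 and styles_s2_list). A minimal sketch:

import numpy as np

def ndci(red_edge, red, eps=1e-9):
    # NDCI = (RE - R) / (RE + R); eps avoids division by zero over no-data pixels
    red_edge = np.asarray(red_edge, dtype=np.float64)
    red = np.asarray(red, dtype=np.float64)
    return (red_edge - red) / (red_edge + red + eps)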
| 54.386207
| 370
| 0.715191
| 1,089
| 7,886
| 5.043159
| 0.202938
| 0.01748
| 0.02622
| 0.011653
| 0.955208
| 0.94665
| 0.936817
| 0.919701
| 0.919701
| 0.901857
| 0
| 0.033993
| 0.186787
| 7,886
| 144
| 371
| 54.763889
| 0.821924
| 0
| 0
| 0.646154
| 0
| 0.138462
| 0.770353
| 0.021177
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023077
| 0
| 0.023077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b2fad7e07ae62e0d699f35c1ea740aa1ff90991
| 5,117
|
py
|
Python
|
models/account_tests.py
|
elementechemlyn/CareConnectBuilder
|
c004fa94c1af64d636ee25de8f13e34fe723b5f3
|
[
"MIT"
] | 1
|
2021-12-24T11:14:38.000Z
|
2021-12-24T11:14:38.000Z
|
models/account_tests.py
|
elementechemlyn/CareConnectBuilder
|
c004fa94c1af64d636ee25de8f13e34fe723b5f3
|
[
"MIT"
] | null | null | null |
models/account_tests.py
|
elementechemlyn/CareConnectBuilder
|
c004fa94c1af64d636ee25de8f13e34fe723b5f3
|
[
"MIT"
] | 1
|
2020-09-16T14:47:26.000Z
|
2020-09-16T14:47:26.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 on 2017-03-22.
# 2017, SMART Health IT.
import os
import io
import unittest
import json
from . import account
from .fhirdate import FHIRDate
class AccountTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Account", js["resourceType"])
return account.Account(js)
def testAccount1(self):
inst = self.instantiate_from("account-example-with-guarantor.json")
self.assertIsNotNone(inst, "Must have instantiated a Account instance")
self.implAccount1(inst)
js = inst.as_json()
self.assertEqual("Account", js["resourceType"])
inst2 = account.Account(js)
self.implAccount1(inst2)
def implAccount1(self, inst):
self.assertEqual(inst.active.end.date, FHIRDate("2016-06-30").date)
self.assertEqual(inst.active.end.as_json(), "2016-06-30")
self.assertEqual(inst.active.start.date, FHIRDate("2016-01-01").date)
self.assertEqual(inst.active.start.as_json(), "2016-01-01")
self.assertEqual(inst.balance.code, "USD")
self.assertEqual(inst.balance.system, "urn:iso:std:iso:4217")
self.assertEqual(inst.balance.unit, "USD")
self.assertEqual(inst.balance.value, -1200)
self.assertEqual(inst.coverage[0].priority, 1)
self.assertEqual(inst.coverage[1].priority, 2)
self.assertEqual(inst.description, "Hospital charges")
self.assertFalse(inst.guarantor[0].onHold)
self.assertEqual(inst.guarantor[0].period.start.date, FHIRDate("2016-01-01").date)
self.assertEqual(inst.guarantor[0].period.start.as_json(), "2016-01-01")
self.assertEqual(inst.id, "ewg")
self.assertEqual(inst.identifier[0].system, "urn:oid:0.1.2.3.4.5.6.7")
self.assertEqual(inst.identifier[0].value, "654321")
self.assertEqual(inst.name, "Inpatient: Peter James Chalmers")
self.assertEqual(inst.period.end.date, FHIRDate("2016-06-30").date)
self.assertEqual(inst.period.end.as_json(), "2016-06-30")
self.assertEqual(inst.period.start.date, FHIRDate("2016-01-01").date)
self.assertEqual(inst.period.start.as_json(), "2016-01-01")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Inpatient Admission for Peter James Chalmers Account</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.type.coding[0].code, "PBILLACCT")
self.assertEqual(inst.type.coding[0].display, "patient billing account")
self.assertEqual(inst.type.coding[0].system, "http://hl7.org/fhir/v3/ActCode")
self.assertEqual(inst.type.text, "patient")
def testAccount2(self):
inst = self.instantiate_from("account-example.json")
self.assertIsNotNone(inst, "Must have instantiated a Account instance")
self.implAccount2(inst)
js = inst.as_json()
self.assertEqual("Account", js["resourceType"])
inst2 = account.Account(js)
self.implAccount2(inst2)
def implAccount2(self, inst):
self.assertEqual(inst.active.end.date, FHIRDate("2016-06-30").date)
self.assertEqual(inst.active.end.as_json(), "2016-06-30")
self.assertEqual(inst.active.start.date, FHIRDate("2016-01-01").date)
self.assertEqual(inst.active.start.as_json(), "2016-01-01")
self.assertEqual(inst.balance.code, "USD")
self.assertEqual(inst.balance.system, "urn:iso:std:iso:4217")
self.assertEqual(inst.balance.unit, "USD")
self.assertEqual(inst.balance.value, -1200)
self.assertEqual(inst.coverage[0].priority, 1)
self.assertEqual(inst.description, "Hospital charges")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.identifier[0].system, "urn:oid:0.1.2.3.4.5.6.7")
self.assertEqual(inst.identifier[0].value, "654321")
self.assertEqual(inst.name, "HACC Funded Billing for Peter James Chalmers")
self.assertEqual(inst.period.end.date, FHIRDate("2016-06-30").date)
self.assertEqual(inst.period.end.as_json(), "2016-06-30")
self.assertEqual(inst.period.start.date, FHIRDate("2016-01-01").date)
self.assertEqual(inst.period.start.as_json(), "2016-01-01")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">HACC Funded Billing for Peter James Chalmers</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.type.coding[0].code, "PBILLACCT")
self.assertEqual(inst.type.coding[0].display, "patient billing account")
self.assertEqual(inst.type.coding[0].system, "http://hl7.org/fhir/v3/ActCode")
self.assertEqual(inst.type.text, "patient")
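Each generated test follows the same round trip: load an example JSON file (located via the FHIR_UNITTEST_DATADIR environment variable), build an account.Account, serialise it back with as_json(), and re-parse. A hedged sketch of invoking these tests (the data path is hypothetical):

import os
import unittest

os.environ["FHIR_UNITTEST_DATADIR"] = "/path/to/fhir/examples"  # hypothetical path
suite = unittest.defaultTestLoader.loadTestsFromName("models.account_tests")
unittest.TextTestRunner(verbosity=2).run(suite)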
| 50.166667
| 145
| 0.667774
| 668
| 5,117
| 5.091317
| 0.194611
| 0.246986
| 0.296089
| 0.060864
| 0.817407
| 0.806822
| 0.806822
| 0.730961
| 0.730961
| 0.730961
| 0
| 0.060513
| 0.176471
| 5,117
| 101
| 146
| 50.663366
| 0.746559
| 0.022279
| 0
| 0.623529
| 1
| 0
| 0.186112
| 0.020412
| 0
| 0
| 0
| 0
| 0.694118
| 1
| 0.058824
| false
| 0
| 0.070588
| 0
| 0.152941
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8676b9241074d5684bda3a90129c24ff659f6e72
| 56,561
|
py
|
Python
|
shog_logic.py
|
BurgundyIsAPublicEnemy/ShogAI
|
6e22165a09a5663418a3ed7f1edbc20bcb7dd501
|
[
"MIT"
] | null | null | null |
shog_logic.py
|
BurgundyIsAPublicEnemy/ShogAI
|
6e22165a09a5663418a3ed7f1edbc20bcb7dd501
|
[
"MIT"
] | 3
|
2020-02-03T00:35:02.000Z
|
2020-03-02T12:29:31.000Z
|
shog_logic.py
|
BurgundyIsAPublicEnemy/ShogAI
|
6e22165a09a5663418a3ed7f1edbc20bcb7dd501
|
[
"MIT"
] | null | null | null |
import upsidedown
from tkinter import Button
from tkinter import messagebox
import random
import copy
from shog_ext import shog_recorder
from shog_ext import shog_play_external_moves
class shog_logic:
def __init__(self, gameState, cells, turnIndicator, dropBlacks, dropWhites, dropBlacksPieces, dropWhitePieces):
self.gameState = gameState
self.cells = cells
self.turnIndicator = turnIndicator
self.dropBlacks = dropBlacks
self.dropWhites = dropWhites
self.dropBlacksPieces = dropBlacksPieces
self.dropWhitePieces = dropWhitePieces
self.simulMoveMatrix = []
self.simulMoveMatrixPre = []
def click(self, row, col):
pos = self.gameState.gameMatrix[row][col]
if self.gameState.gameState == 3:
self.cells[(row, col)].configure(background='RED')
print 'PIECE DROP:' + str(self.getPieceFrmPos(row + 1, col + 1))
if (str(self.getPieceFrmPos(row + 1, col + 1)) == '0'):
self.gameState.newMatrixPosX = row
self.gameState.newMatrixPosY = col
pos = None
if self.gameState.isBlackTurn == True:
print ('BLACK', len(self.gameState.blackcaptured), self.gameState.droprank, self.gameState.blackcaptured)
pos = self.gameState.blackcaptured[self.gameState.droprank]
else:
print ('WHITE', len(self.gameState.whitecaptured), self.gameState.droprank, self.gameState.whitecaptured)
pos = self.gameState.whitecaptured[self.gameState.droprank]
if 'n' in pos:
if (row <= 1 and self.gameState.isBlackTurn == True) or (row >= 7 and self.gameState.isBlackTurn == False):
print 'Too deep for knight'
self.resetBoardGraphics()
pos = None
self.softReset()
else:
self.moveLegalDrop(pos, row, col)
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isBlackTurn == True:
#self.gameState.blackcaptured.pop(self.gameState.droprank)
self.dropBlacksPieces[self.gameState.droprank].pack_forget()
else:
#self.gameState.whitecaptured.pop(self.gameState.droprank)
self.dropWhitePieces[self.gameState.droprank].pack_forget()
self.ResetSwitchTurns()
elif 'l' in pos:
if (row == 0 and self.gameState.isBlackTurn == True) or (row == 8 and self.gameState.isBlackTurn == False):
print 'Too deep for lance'
self.resetBoardGraphics()
pos = None
self.softReset()
else:
self.moveLegalDrop(pos, row, col)
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isBlackTurn == True:
#self.gameState.blackcaptured.pop(self.gameState.droprank)
self.dropBlacksPieces[self.gameState.droprank].pack_forget()
else:
#self.gameState.whitecaptured.pop(self.gameState.droprank)
self.dropWhitePieces[self.gameState.droprank].pack_forget()
self.ResetSwitchTurns()
#no 2 pawn rule
elif 'p' in pos:
if (row == 8 and self.gameState.isBlackTurn == True) or (row == 0 and self.gameState.isBlackTurn == False):
print 'Too deep for pawn'
self.resetBoardGraphics()
self.softReset()
pos = None
else:
colMat = []
for y in range(0, self.gameState.board_size):
colMat.append(self.gameState.gameMatrix[y][col])
print colMat
pawnTeam = 'p'
if self.gameState.isBlackTurn == True:
pawnTeam = 'B' + pawnTeam
else:
pawnTeam = 'W' + pawnTeam
if pawnTeam in colMat:
print 'There is a pawn on this column'
pos = None
self.resetBoardGraphics()
self.softReset()
else:
#get pos of king
kingTeam = 'k'
if self.gameState.isBlackTurn == True:
kingTeam = 'W' + kingTeam
else:
kingTeam = 'B' + kingTeam
self.moveLegalDrop(pawnTeam, row, col)
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isBlackTurn == True:
print 'Removing from Black stack'
#self.gameState.blackcaptured.pop(self.gameState.droprank)
self.dropBlacksPieces[self.gameState.droprank].pack_forget()
else:
print 'Removing from White stack'
#self.gameState.whitecaptured.pop(self.gameState.droprank)
self.dropWhitePieces[self.gameState.droprank].pack_forget()
self.ResetSwitchTurns()
elif 'p' not in pos and 'l' not in pos and 'n' not in pos:
pos = pos[-1:]
if self.gameState.isBlackTurn == True:
pos = 'B' + pos
else:
pos = 'W' + pos
self.moveLegalDrop(pos, row, col)
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isBlackTurn == True:
#self.gameState.blackcaptured.pop(self.gameState.droprank)
self.dropBlacksPieces[self.gameState.droprank].pack_forget()
else:
#self.gameState.whitecaptured.pop(self.gameState.droprank)
self.dropWhitePieces[self.gameState.droprank].pack_forget()
self.ResetSwitchTurns()
else:
print 'A piece is already there. Move illegal.'
self.resetBoardGraphics()
elif self.gameState.gameState == 1:
self.cells[(row, col)].configure(background='blue')
self.gameState.newMatrixPosX = row
self.gameState.newMatrixPosY = col
self.gameState.gameState = 2
elif self.gameState.gameState == 0:
#AI parts
shog_ext = shog_play_external_moves()
if (shog_ext.isThereAMoveToPlay_ext()):
moveRead = shog_ext.convertTurnToGameMatrixCompatible()
print 'Hello ' + str(moveRead)
possiblepcs = []
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
possiblepc = self.getPosWhichCanMakeMove(i, j, p, moveRead[4] + 1, moveRead[5] + 1)
if possiblepc != '':
possiblepcs.append((possiblepc, i, j))
print 'PRE-MOVE: ' + str(possiblepcs)
if len(possiblepcs) == 1:
pos = possiblepcs[0][0]
self.gameState.oldMatrixPosX = possiblepcs[0][1]
self.gameState.oldMatrixPosY = possiblepcs[0][2]
self.gameState.newMatrixPosX = moveRead[4]
self.gameState.newMatrixPosY = moveRead[5]
self.gameState.pieceSelected = pos
self.gameState.gameState = 2
self.gameState.gameState = self.getPossibleMoves(self.gameState.oldMatrixPosX, self.gameState.oldMatrixPosY, pos)
open('ext_data/movetoplay.txt', 'w').close()
else:
for c in range(0, len(possiblepcs)):
if possiblepcs[c][0] == 'B' + moveRead[1]:
print 'This is the one'
pos = moveRead[1]
self.gameState.oldMatrixPosX = moveRead[2]
self.gameState.oldMatrixPosY = moveRead[3]
self.gameState.newMatrixPosX = moveRead[4]
self.gameState.newMatrixPosY = moveRead[5]
self.gameState.pieceSelected = moveRead[1]
self.gameState.gameState = 2
self.gameState.gameState = self.getPossibleMoves(self.gameState.oldMatrixPosX, self.gameState.oldMatrixPosY, pos)
open('ext_data/movetoplay.txt', 'w').close()
print possiblepcs[c]
else:
if pos != 0:
self.cells[(row, col)].configure(background='yellow')
self.gameState.oldMatrixPosX = row
self.gameState.oldMatrixPosY = col
self.gameState.pieceSelected = pos
self.gameState.gameState = self.getPossibleMoves(self.gameState.oldMatrixPosX, self.gameState.oldMatrixPosY, pos)
if self.gameState.newMatrixPosX != None and self.gameState.newMatrixPosY != None and self.gameState.pieceSelected != None:
print 'TRYING: ' + str((self.gameState.pieceSelected, self.gameState.oldMatrixPosX, self.gameState.oldMatrixPosY, self.gameState.newMatrixPosX, self.gameState.newMatrixPosY))
self.resetBoardGraphics()
self.moveLegalGO(self.gameState.pieceSelected, self.gameState.oldMatrixPosX, self.gameState.oldMatrixPosY, self.gameState.newMatrixPosX, self.gameState.newMatrixPosY)
def resetBoardGraphics(self):
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
self.cells[(i, j)].configure(background='white')
def ResetSwitchTurns(self):
self.resetBoardGraphics()
self.softReset()
if (self.gameState.isBlackTurn == True):
self.turnIndicator.configure(text='White Turn')
self.gameState.isBlackTurn = False
else:
self.turnIndicator.configure(text='Black Turn')
self.gameState.isBlackTurn = True
def softReset(self):
self.gameState.newMatrixPosX = None
self.gameState.newMatrixPosY = None
self.gameState.oldMatrixPosX = None
self.gameState.oldMatrixPosY = None
self.gameState.pieceSelected = None
self.gameState.gameState = 0
def getPosFromPiece(self, pos):
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
if pos == self.gameState.gameMatrix[i][j]:
return i,j
return None
def getPieceFrmPos(self, h, w):
return self.gameState.gameMatrix[(h-1)][(w-1)]
def getPossibleMoves(self, oldMatrixPosX, oldMatrixPosY, pos):
if (self.gameState.isBlackTurn == True and pos[:-1] == 'B') or (self.gameState.isBlackTurn == False and pos[:-1] == 'W'):
with open('movesets.txt') as f:
content = f.readlines()
for index in range(len(content)):
if pos[-1:] in content[index]:
movesets = content[index].split('=')[1]
#cast movesets to array
possiblemovelayouts = eval(movesets)
for j in range(len(possiblemovelayouts)):
x_dif = int((possiblemovelayouts[j])[0])
y_dif = int((possiblemovelayouts[j])[1])
if pos[:-1] == 'B':
x_dif = -1 * x_dif
y_dif = -1 * y_dif
if pos[:-1] == 'W':
x_dif = 1 * x_dif
y_dif = 1 * y_dif
try:
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == False):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'B') and self.gameState.isBlackTurn == True):
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='orange')
self.gameState.possibleMoveMatrix.append((oldMatrixPosX + x_dif, oldMatrixPosY + y_dif))
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'W') and self.gameState.isBlackTurn == False):
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='orange')
self.gameState.possibleMoveMatrix.append((oldMatrixPosX + x_dif, oldMatrixPosY + y_dif))
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == False):
break
except Exception as e:
print e
print('Move not on board so ignoring')
return 1
else:
print 'It is not your turn yet'
return 0
def getNumberPossibleMoves(self, oldMatrixPosX, oldMatrixPosY, pos):
count = 0
with open('movesets.txt') as f:
content = f.readlines()
for index in range(len(content)):
if pos[-1:] in content[index]:
movesets = content[index].split('=')[1]
#cast movesets to array
possiblemovelayouts = eval(movesets)
for j in range(len(possiblemovelayouts)):
x_dif = int((possiblemovelayouts[j])[0])
y_dif = int((possiblemovelayouts[j])[1])
if pos[:-1] == 'B':
x_dif = -1 * x_dif
y_dif = -1 * y_dif
if pos[:-1] == 'W':
x_dif = 1 * x_dif
y_dif = 1 * y_dif
try:
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == False):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'B') and self.gameState.isBlackTurn == True):
count = count + 1
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'W') and self.gameState.isBlackTurn == False):
count = count + 1
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == False):
break
except Exception as e:
print('Move not on board so ignoring')
return count
def moveLegalGO(self, pos, oldMatrixPosXlocal, oldMatrixPosYlocal, newMatrixPosXlocal, newMatrixPosYlocal):
#For the game recorder
resultPromotion = False
resultCapture = False
resultDrop = False
if ((newMatrixPosXlocal,newMatrixPosYlocal)) in self.gameState.possibleMoveMatrix:
#When recording games... we check who actually made the move
#Done by checking who can do it, THEN seeing if we need to clear ambiguity
possiblepcs = []
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
possiblepc = self.getPosWhichCanMakeMove(i, j, p, newMatrixPosXlocal + 1, newMatrixPosYlocal + 1)
if possiblepc != '':
possiblepcs.append(possiblepc)
print possiblepcs
#Get current pre-move position we are moving to
old_state_pos = self.getPieceFrmPos(newMatrixPosXlocal + 1, newMatrixPosYlocal + 1)
#Promotion
if (self.gameState.isBlackTurn == True and (newMatrixPosXlocal <= 2 or oldMatrixPosXlocal <= 2)):
if (pos[-1:] == 'p' and newMatrixPosXlocal <= 0) or (pos[-1:] == 'n' and newMatrixPosXlocal <= 1) or (pos[-1:] == 'l' and newMatrixPosXlocal <= 0):
pos = pos.upper()
resultPromotion = True
else:
pos, resultPromotion = self.promotion(pos)
if (self.gameState.isBlackTurn == False and (newMatrixPosXlocal >= 6 or oldMatrixPosXlocal >= 6)):
if (pos[-1:] == 'p' and newMatrixPosXlocal >= 8) or (pos[-1:] == 'n' and newMatrixPosXlocal >= 7) or (pos[-1:] == 'l' and newMatrixPosXlocal >= 8):
pos = pos.upper()
resultPromotion = True
else:
pos, resultPromotion = self.promotion(pos)
#Capture
if (self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] != 0):
print 'Captured: ' + self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal]
cap_piece = self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal]
if self.gameState.isBlackTurn == True:
self.gameState.blackcaptured.append('B' + cap_piece[-1:].lower())
print ('Adding to black: ' + str(len(self.gameState.blackcaptured)))
newButton = Button(self.dropBlacks, text= 'B' + str(self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal]).lower()[-1:], command = lambda row=len(self.gameState.blackcaptured), piece='B' : self.clickDrop(row, piece))
newButton.pack()
self.dropBlacksPieces.append(newButton)
resultCapture = True
if self.gameState.isBlackTurn == False:
print ('Adding to white: ' + str(len(self.gameState.whitecaptured)))
self.gameState.whitecaptured.append('W' + cap_piece[-1:].lower())
newButton = Button(self.dropWhites, text= 'W' + str(self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal]).lower()[-1:], command = lambda row=len(self.gameState.whitecaptured), piece='W' : self.clickDrop(row, piece))
newButton.pack()
self.dropWhitePieces.append(newButton)
resultCapture = True
self.gameState.gameState = 0
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = pos
self.gameState.gameMatrix[oldMatrixPosXlocal][oldMatrixPosYlocal] = 0
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text='')
#Check for checks
#This method. Is. perfect.
if self.gameState.isCheck == False:
#Does our move reveal a check for the other team?
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
if self.gameState.isCheck == True:
print 'ILLEGAL MOVE: Reveals check'
break
if self.gameState.isCheck == True:
break
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isCheck == False:
#Does our move give a check?
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
else:
#Does our move get us out of a check?
print "Now that the opponent's move has been made, let's check if check is still valid"
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
#reminder: [y axis][x axis]
if self.gameState.isBlackTurn == True:
kingcolor = self.cells[self.getPosFromPiece('Wk')].cget('background')
else:
kingcolor = self.cells[self.getPosFromPiece('Bk')].cget('background')
if (kingcolor == 'cyan') :
print 'Still in check, restart that move'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(old_fill)[-1:]))
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(pos)[-1:]))
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=str(old_fill)[-1:])
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=str(pos)[-1:])
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.gameState.gameMatrix[oldMatrixPosXlocal][oldMatrixPosYlocal] = pos
self.resetBoardGraphics()
self.softReset()
return
else:
print 'King is out of check, continue play'
self.gameState.isCheck = False
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=pos[-1:])
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.resetBoardGraphics()
self.gameState.newMatrixPosX = None
self.gameState.newMatrixPosY = None
self.gameState.oldMatrixPosX = None
self.gameState.oldMatrixPosY = None
self.gameState.pieceSelected = None
if len(possiblepcs) != len(set(possiblepcs)):
shog_recorder().recordMove(pos, resultPromotion, resultCapture, resultDrop, newMatrixPosYlocal, newMatrixPosXlocal, oldMatrixPosYlocal , oldMatrixPosXlocal )
else:
shog_recorder().recordMove(pos, resultPromotion, resultCapture, resultDrop, newMatrixPosYlocal, newMatrixPosXlocal)
if (self.gameState.isBlackTurn == True):
self.turnIndicator.configure(text='White Turn')
self.gameState.isBlackTurn = False
else:
self.turnIndicator.configure(text='Black Turn')
self.gameState.isBlackTurn = True
else:
print 'That move is NOT legal!'
self.resetBoardGraphics()
self.softReset()
self.gameState.possibleMoveMatrix *= 0
#Now we check if it's a checkmate
if (self.gameState.isCheck == True):
#Get all of your available moves
print('Check if it is a checkmate')
resetMatrix = copy.deepcopy(self.gameState.gameMatrix)
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
self.populateSimulMoveArrays(i, j, str(self.gameState.gameMatrix[i][j]), True)
for i in range(0, len(self.simulMoveMatrix)):
if (self.simulateMove (self.simulMoveMatrixPre[i][0], self.simulMoveMatrixPre[i][1], self.simulMoveMatrixPre[i][2], self.simulMoveMatrix[i][0], self.simulMoveMatrix[i][1], i) == False):
print (i)
break
print (i, len(self.simulMoveMatrix))
if i == (len(self.simulMoveMatrix) - 1):
#Check if we can drop a piece to cover the check
for k in range(0, len(self.simulMoveMatrix)):
if 'k' in self.simulMoveMatrix[k][2]:
print self.simulMoveMatrix[k]
if (self.gameState.isBlackTurn == True):
if (self.simulateDrop('Wp', self.simulMoveMatrix[i][0], self.simulMoveMatrix[i][1]) == False):
break
else:
if (self.simulateDrop('Bp', self.simulMoveMatrix[i][0], self.simulMoveMatrix[i][1]) == False):
break
print 'Checkmate!'
self.simulMoveMatrixPre *= 0
self.simulMoveMatrix *= 0
def getPosWhichCanMakeMove(self, oldMatrixPosX, oldMatrixPosY, pos, newMatrixPosX, newMatrixPosY):
if (self.gameState.isBlackTurn == True and pos[:-1] == 'B') or (self.gameState.isBlackTurn == False and pos[:-1] == 'W'):
with open('movesets.txt') as f:
content = f.readlines()
for index in range(len(content)):
if pos[-1:] in content[index]:
movesets = content[index].split('=')[1]
#cast movesets to array
possiblemovelayouts = eval(movesets)
for j in range(len(possiblemovelayouts)):
x_dif = int((possiblemovelayouts[j])[0])
y_dif = int((possiblemovelayouts[j])[1])
if pos[:-1] == 'B':
x_dif = -1 * x_dif
y_dif = -1 * y_dif
if pos[:-1] == 'W':
x_dif = 1 * x_dif
y_dif = 1 * y_dif
try:
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == False):
break
if ((oldMatrixPosX + x_dif + 1 == newMatrixPosX) and (oldMatrixPosY + y_dif + 1 == newMatrixPosY)):
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='red')
return pos
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == False):
break
except Exception as e:
pass
return ''
else:
return ''
def isKingUnderCheck(self, oldMatrixPosX, oldMatrixPosY, pos):
if (self.gameState.isBlackTurn == True and pos[:-1] == 'B') or (self.gameState.isBlackTurn == False and pos[:-1] == 'W'):
with open('movesets.txt') as f:
content = f.readlines()
for index in range(len(content)):
if pos[-1:] in content[index]:
movesets = content[index].split('=')[1]
#cast movesets to array
possiblemovelayouts = eval(movesets)
for j in range(len(possiblemovelayouts)):
x_dif = int((possiblemovelayouts[j])[0])
y_dif = int((possiblemovelayouts[j])[1])
if pos[:-1] == 'B':
x_dif = -1 * x_dif
y_dif = -1 * y_dif
if pos[:-1] == 'W':
x_dif = 1 * x_dif
y_dif = 1 * y_dif
try:
if oldMatrixPosX + x_dif >= 0 and oldMatrixPosY + y_dif >= 0:
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == False):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'B') and self.gameState.isBlackTurn == True):
if str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1)) == 'Wk':
print 'BLACK CHECK!'
self.gameState.isCheck = True
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='cyan')
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'W') and self.gameState.isBlackTurn == False):
if str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1)) == 'Bk':
print(oldMatrixPosX + x_dif , oldMatrixPosY + y_dif, pos )
print 'WHITE CHECK!'
self.gameState.isCheck = True
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='cyan')
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == True):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == False):
break
except Exception as e:
pass
return 1
else:
return 0
def promotion(self, pos):
if 'g' not in pos and 'k' not in pos and pos[-1:].islower() == True:
MsgBox = messagebox.askquestion("Promotion!", "You have reached promotion. Would you like to promote your piece?")
if MsgBox == 'yes':
return pos.upper(), True
else:
return pos, False
return pos, False
def clickDrop(self, row, piece):
print ('DROPPING: ', row, piece)
with open('configure.txt') as f:
configContent = f.readlines()
dropSetting = int(configContent[1])
if (dropSetting == 0):
if (self.gameState.isBlackTurn == True and 'B' in piece) or (self.gameState.isBlackTurn == False and 'W' in piece):
self.gameState.droprank = row - 1
self.gameState.gameState = 3
else:
print "You cannot drop your opponent's pieces"
self.resetBoardGraphics()
self.gameState.gameState = 0
def moveLegalDrop(self, pos, newMatrixPosXlocal, newMatrixPosYlocal):
global isCheck, GAMESTATE, newMatrixPosX, newMatrixPosY, posToMove, gameMatrix, BLACKTURN
#Get current pre-move position we are moving to
old_state_pos = self.getPieceFrmPos(newMatrixPosXlocal + 1, newMatrixPosYlocal + 1)
print old_state_pos
#For the game recorder
resultPromotion = False
resultCapture = False
resultDrop = True
#No Capturing or Promotion
self.gameState.gameState = 0
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = pos
#Check for checks
#This method. Is. perfect.
if self.gameState.isCheck == False:
#Does our move reveal a check for the other team?
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
if self.gameState.isCheck == True:
print 'ILLEGAL MOVE: Reveals check'
break
if self.gameState.isCheck == True:
break
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isCheck == False:
#Does our move give a check?
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
else:
#Does our move get us out of a check?
print "Now that the opponent's move has been made, let's check if check is still valid"
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
#reminder: [y axis][x axis]
if self.gameState.isBlackTurn == True:
kingcolor = self.cells[self.getPosFromPiece('Wk')].cget('background')
else:
kingcolor = self.cells[self.getPosFromPiece('Bk')].cget('background')
if (kingcolor == 'cyan') :
print 'Still in check, restart that move'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.resetBoardGraphics()
self.softReset()
return
else:
print 'King is out of check, continue play'
self.gameState.isCheck = False
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=pos[-1:])
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.resetBoardGraphics()
self.softReset()
if (self.gameState.isBlackTurn == True):
self.turnIndicator.configure(text='White Turn')
self.gameState.isBlackTurn = False
else:
self.turnIndicator.configure(text='Black Turn')
self.gameState.isBlackTurn = True
shog_recorder().recordMove(pos, resultPromotion, resultCapture, resultDrop, newMatrixPosYlocal, newMatrixPosXlocal)
self.gameState.possibleMoveMatrix *= 0
#Now we check if it's a checkmate
if (self.gameState.isCheck == True):
#Get all of your available moves
resetMatrix = copy.deepcopy(self.gameState.gameMatrix)
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
self.populateSimulMoveArrays(i, j, str(self.gameState.gameMatrix[i][j]), True)
for i in range(0, len(self.simulMoveMatrix)):
if (self.simulateMove (self.simulMoveMatrixPre[i][0], self.simulMoveMatrixPre[i][1], self.simulMoveMatrixPre[i][2], self.simulMoveMatrix[i][0], self.simulMoveMatrix[i][1], i) == False):
print i
break
print (i, len(self.simulMoveMatrix))
if i == (len(self.simulMoveMatrix) - 1):
if ('p' not in pos):
print 'Checkmate!'
else:
print 'You cannot checkmate by dropping a pawn'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.resetBoardGraphics()
self.softReset()
return
self.simulMoveMatrixPre *= 0
self.simulMoveMatrix *= 0
def populateSimulMoveArrays(self, oldMatrixPosX, oldMatrixPosY, pos, Turn):
kingspace = False
if (self.gameState.isBlackTurn == True and pos[:-1] == 'B') or (self.gameState.isBlackTurn == False and pos[:-1] == 'W'):
with open('movesets.txt') as f:
content = f.readlines()
for index in range(len(content)):
if pos[-1:] in content[index]:
movesets = content[index].split('=')[1]
#cast movesets to array
possiblemovelayouts = eval(movesets)
for j in range(len(possiblemovelayouts)):
x_dif = int((possiblemovelayouts[j])[0])
y_dif = int((possiblemovelayouts[j])[1])
if pos[:-1] == 'B':
x_dif = -1 * x_dif
y_dif = -1 * y_dif
if pos[:-1] == 'W':
x_dif = 1 * x_dif
y_dif = 1 * y_dif
try:
#If the piece is Black and you're Black, stop
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == True):
break
#If the piece is White and you're White, stop
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == False):
break
#If the piece is White and you're Black, you can capture, so color the square
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'B') and self.gameState.isBlackTurn == True):
if (Turn == False and (self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].cget('background')) == 'blue'):
pass
else:
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='pink')
self.simulMoveMatrixPre.append((oldMatrixPosX, oldMatrixPosY, pos))
self.simulMoveMatrix.append((oldMatrixPosX + x_dif, oldMatrixPosY + y_dif, pos))
#If the piece is Black and you're White, you can capture, so color the square
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] != 'W') and self.gameState.isBlackTurn == False):
if (Turn == False and (self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].cget('background')) == 'pink') or (Turn == True and 'k' in pos and (self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].cget('background')) == 'pink'):
pass
else:
self.cells[(oldMatrixPosX + x_dif, oldMatrixPosY + y_dif)].configure(background='blue')
self.simulMoveMatrixPre.append((oldMatrixPosX, oldMatrixPosY, pos))
self.simulMoveMatrix.append((oldMatrixPosX + x_dif, oldMatrixPosY + y_dif, pos))
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'W') and self.gameState.isBlackTurn == True and (str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[1:] != 'k')):
break
if ((str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[:-1] == 'B') and self.gameState.isBlackTurn == False and (str(self.getPieceFrmPos(oldMatrixPosX + x_dif + 1, oldMatrixPosY + y_dif + 1))[1:] != 'k')):
break
except Exception as e:
pass
#Now we check if we have any drops. We only take the 8 squares surrounding the king, as those are the ones that matter anyway.
return 1
else:
return 0
def simulateMove(self, oldMatrixPosXlocal, oldMatrixPosYlocal, pos, newMatrixPosXlocal, newMatrixPosYlocal, iteration):
print 'ITERATION: ' + str(iteration) + ' USING ' + str((oldMatrixPosXlocal, oldMatrixPosYlocal, pos, newMatrixPosXlocal, newMatrixPosYlocal))
#Get current pre-move position we are moving to
old_state_pos = self.getPieceFrmPos(newMatrixPosXlocal + 1, newMatrixPosYlocal + 1)
print old_state_pos
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = pos
self.gameState.gameMatrix[oldMatrixPosXlocal][oldMatrixPosYlocal] = 0
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text='')
#Check for checks
#This method. Is. perfect.
if self.gameState.isCheck == False:
#Does our move reveal a check for the other team?
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
if self.gameState.isCheck == True:
print 'ILLEGAL MOVE: Reveals check'
break
if self.gameState.isCheck == True:
break
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isCheck == False:
#Does our move give a check?
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
else:
#Does our move get us out of a check?
print "Now that the opponent's move has been made, let's check if check is still valid"
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
#reminder: [y axis][x axis]
if self.gameState.isBlackTurn == True:
kingcolor = self.cells[self.getPosFromPiece('Wk')].cget('background')
else:
kingcolor = self.cells[self.getPosFromPiece('Bk')].cget('background')
if (kingcolor == 'cyan') :
print 'Still in check, restart that move'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(old_fill)[-1:]))
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(pos)[-1:]))
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=str(old_fill)[-1:])
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=str(pos)[-1:])
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.gameState.gameMatrix[oldMatrixPosXlocal][oldMatrixPosYlocal] = pos
self.resetBoardGraphics()
self.softReset()
return True
else:
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(old_fill)[-1:]))
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(str(pos)[-1:]))
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=str(old_fill)[-1:])
self.cells[(oldMatrixPosXlocal, oldMatrixPosYlocal)].configure(text=str(pos)[-1:])
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.gameState.gameMatrix[oldMatrixPosXlocal][oldMatrixPosYlocal] = pos
self.resetBoardGraphics()
self.softReset()
print 'King is out of check, continue play'
self.gameState.isCheck = False
return False
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=pos[-1:])
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.resetBoardGraphics()
self.gameState.newMatrixPosX = None
self.gameState.newMatrixPosY = None
self.gameState.oldMatrixPosX = None
self.gameState.oldMatrixPosY = None
self.gameState.pieceSelected = None
if (self.gameState.isBlackTurn == True):
self.turnIndicator.configure(text='White Turn')
self.gameState.isBlackTurn = False
else:
self.turnIndicator.configure(text='Black Turn')
self.gameState.isBlackTurn = True
self.gameState.possibleMoveMatrix *= 0
def simulateDrop(self, pos, newMatrixPosXlocal, newMatrixPosYlocal):
#Get current pre-move position we are moving to
old_state_pos = self.getPieceFrmPos(newMatrixPosXlocal + 1, newMatrixPosYlocal + 1)
print old_state_pos
#No Capturing or Promotion
self.gameState.gameState = 0
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = pos
#Check for checks
#This method. Is. perfect.
if self.gameState.isCheck == False:
#Does our move reveal a check for the other team?
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
if self.gameState.isCheck == True:
print 'ILLEGAL MOVE: Reveals check'
break
if self.gameState.isCheck == True:
break
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
if self.gameState.isCheck == False:
#Does our move give a check?
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
else:
#Does our move get us out of a check?
print "Now that the opponent's move has been made, let's check if check is still valid"
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
for i in range(0, self.gameState.board_size):
for j in range(0, self.gameState.board_size):
p = self.getPieceFrmPos(i + 1, j + 1)
if p != 0:
self.isKingUnderCheck(i, j, p)
#reminder: [y axis][x axis]
if self.gameState.isBlackTurn == True:
kingcolor = self.cells[self.getPosFromPiece('Wk')].cget('background')
else:
kingcolor = self.cells[self.getPosFromPiece('Bk')].cget('background')
if (kingcolor == 'cyan') :
print 'Still in check, restart that move'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.resetBoardGraphics()
self.softReset()
return True
else:
print 'King is out of check, continue play'
old_fill = old_state_pos
if old_fill == 0:
old_fill = ''
print 'Resetting old position: ' + str(old_fill) + ' as move ' + str(pos) + ' is illegal'
#Load back or direct drop?
if (self.gameState.isBlackTurn == True):
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
else:
self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text='')
#self.cells[(newMatrixPosXlocal, newMatrixPosYlocal)].configure(text=upsidedown.convChartoUpsideDown(pos[-1:]))
self.gameState.isBlackTurn = not self.gameState.isBlackTurn
self.gameState.gameMatrix[newMatrixPosXlocal][newMatrixPosYlocal] = old_state_pos
self.resetBoardGraphics()
self.softReset()
return False
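Several methods above re-read movesets.txt and eval() the offset list on every call. A safer, equivalent loader under the same assumed file format (one "piece=[[dx, dy], ...]" entry per line, inferred from the split('=') / eval() pattern above) would use ast.literal_eval. This is a sketch, not part of the original file:

import ast

def load_movesets(path='movesets.txt'):
    # Assumed format per line: p=[[1, 0], ...]  (piece letter, '=', offset list)
    movesets = {}
    with open(path) as f:
        for line in f:
            if '=' not in line:
                continue
            piece, layout = line.split('=', 1)
            movesets[piece.strip()] = ast.literal_eval(layout.strip())
    return movesets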
| 49.398253
| 276
| 0.524867
| 5,331
| 56,561
| 5.513412
| 0.057775
| 0.149054
| 0.107784
| 0.051443
| 0.845842
| 0.824816
| 0.794876
| 0.77749
| 0.769665
| 0.746768
| 0
| 0.011071
| 0.377168
| 56,561
| 1,144
| 277
| 49.441434
| 0.823266
| 0.054048
| 0
| 0.783191
| 0
| 0
| 0.03906
| 0.000861
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.00609
| 0.008526
| null | null | 0.079172
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 8685eae534c42eb28ae165076f08aa60980639ca
| 166
| py
| Python
| lisa/__main__.py
| mjirik/lisa
| 06c5cb8f375f51302341e768512f02236774c8a3
| ["BSD-3-Clause"]
| 22
| 2015-01-26T12:58:54.000Z
| 2021-04-15T17:48:13.000Z
| lisa/__main__.py
| mjirik/lisa
| 06c5cb8f375f51302341e768512f02236774c8a3
| ["BSD-3-Clause"]
| 31
| 2015-01-23T14:46:13.000Z
| 2018-05-18T14:47:18.000Z
| lisa/__main__.py
| mjirik/lisa
| 06c5cb8f375f51302341e768512f02236774c8a3
| ["BSD-3-Clause"]
| 13
| 2015-06-30T08:54:27.000Z
| 2020-09-11T16:08:19.000Z
|
# This is because we want to draw splash screen before time-consuming imports
from lisa.main import lisa_main
# from .main import lisa_main
# import main
lisa_main()
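A minimal sketch (not lisa's code) of the deferred-import pattern the comment above relies on: give the user cheap visual feedback first, then pay for the heavy imports. The names below are illustrative only.

def main():
    print("Loading...")   # stands in for drawing the splash screen
    import json           # stands in for a slow, heavyweight import
    print("Ready:", json.dumps({"status": "ok"}))

if __name__ == "__main__":
    main()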
| 27.666667
| 77
| 0.795181
| 28
| 166
| 4.607143
| 0.607143
| 0.248062
| 0.217054
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156627
| 166
| 5
| 78
| 33.2
| 0.921429
| 0.692771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 7
| 86a0f9d367c6e28bfb24765a06cabb009272d0dc
| 20,563
| py
| Python
| sib_api_v3_sdk/api/transactional_sms_api.py
| tarraschk/APIv3-python-library
| 440883d3a7ca503a655f16bf69cef6c122a95e01
| ["MIT"]
| null | null | null
| sib_api_v3_sdk/api/transactional_sms_api.py
| tarraschk/APIv3-python-library
| 440883d3a7ca503a655f16bf69cef6c122a95e01
| ["MIT"]
| null | null | null
| sib_api_v3_sdk/api/transactional_sms_api.py
| tarraschk/APIv3-python-library
| 440883d3a7ca503a655f16bf69cef6c122a95e01
| ["MIT"]
| null | null | null
|
# coding: utf-8
"""
SendinBlue API
SendinBlue provides a RESTful API that can be used with any language. With this API, you will be able to: - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable | # noqa: E501
OpenAPI spec version: 3.0.0
Contact: contact@sendinblue.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from sib_api_v3_sdk.api_client import ApiClient
class TransactionalSMSApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_sms_events(self, **kwargs): # noqa: E501
"""Get all your SMS activity (unaggregated events) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sms_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int limit: Number of documents per page
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int offset: Index of the first document of the page
:param int days: Number of days in the past including today (positive integer). Not compatible with 'startDate' and 'endDate'
:param str phone_number: Filter the report for a specific phone number
:param str event: Filter the report for specific events
:param str tags: Filter the report for specific tags passed as a serialized urlencoded array
:return: GetSmsEventReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sms_events_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_sms_events_with_http_info(**kwargs) # noqa: E501
return data
def get_sms_events_with_http_info(self, **kwargs): # noqa: E501
"""Get all your SMS activity (unaggregated events) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sms_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int limit: Number of documents per page
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int offset: Index of the first document of the page
:param int days: Number of days in the past including today (positive integer). Not compatible with 'startDate' and 'endDate'
:param str phone_number: Filter the report for a specific phone number
:param str event: Filter the report for specific events
:param str tags: Filter the report for specific tags passed as a serialized urlencoded array
:return: GetSmsEventReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['limit', 'start_date', 'end_date', 'offset', 'days', 'phone_number', 'event', 'tags'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sms_events" % key
)
params[key] = val
del params['kwargs']
if 'limit' in params and params['limit'] > 100: # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_sms_events`, must be a value less than or equal to `100`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'start_date' in params:
query_params.append(('startDate', params['start_date'])) # noqa: E501
if 'end_date' in params:
query_params.append(('endDate', params['end_date'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'phone_number' in params:
query_params.append(('phoneNumber', params['phone_number'])) # noqa: E501
if 'event' in params:
query_params.append(('event', params['event'])) # noqa: E501
if 'tags' in params:
query_params.append(('tags', params['tags'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api-key', 'partner-key'] # noqa: E501
return self.api_client.call_api(
'/transactionalSMS/statistics/events', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetSmsEventReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_transac_aggregated_sms_report(self, **kwargs): # noqa: E501
"""Get your SMS activity aggregated over a period of time # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transac_aggregated_sms_report(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int days: Number of days in the past including today (positive integer). Not compatible with startDate and endDate
:param str tag: Filter on a tag
:return: GetTransacAggregatedSmsReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_transac_aggregated_sms_report_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_transac_aggregated_sms_report_with_http_info(**kwargs) # noqa: E501
return data
def get_transac_aggregated_sms_report_with_http_info(self, **kwargs): # noqa: E501
"""Get your SMS activity aggregated over a period of time # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transac_aggregated_sms_report_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int days: Number of days in the past including today (positive integer). Not compatible with startDate and endDate
:param str tag: Filter on a tag
:return: GetTransacAggregatedSmsReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_date', 'end_date', 'days', 'tag'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_transac_aggregated_sms_report" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_date' in params:
query_params.append(('startDate', params['start_date'])) # noqa: E501
if 'end_date' in params:
query_params.append(('endDate', params['end_date'])) # noqa: E501
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'tag' in params:
query_params.append(('tag', params['tag'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api-key', 'partner-key'] # noqa: E501
return self.api_client.call_api(
'/transactionalSMS/statistics/aggregatedReport', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTransacAggregatedSmsReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_transac_sms_report(self, **kwargs): # noqa: E501
"""Get your SMS activity aggregated per day # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transac_sms_report(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int days: Number of days in the past including today (positive integer). Not compatible with 'startDate' and 'endDate'
:param str tag: Filter on a tag
:return: GetTransacSmsReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_transac_sms_report_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_transac_sms_report_with_http_info(**kwargs) # noqa: E501
return data
def get_transac_sms_report_with_http_info(self, **kwargs): # noqa: E501
"""Get your SMS activity aggregated per day # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transac_sms_report_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str start_date: Mandatory if endDate is used. Starting date (YYYY-MM-DD) of the report
:param str end_date: Mandatory if startDate is used. Ending date (YYYY-MM-DD) of the report
:param int days: Number of days in the past including today (positive integer). Not compatible with 'startDate' and 'endDate'
:param str tag: Filter on a tag
:return: GetTransacSmsReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_date', 'end_date', 'days', 'tag'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_transac_sms_report" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_date' in params:
query_params.append(('startDate', params['start_date'])) # noqa: E501
if 'end_date' in params:
query_params.append(('endDate', params['end_date'])) # noqa: E501
if 'days' in params:
query_params.append(('days', params['days'])) # noqa: E501
if 'tag' in params:
query_params.append(('tag', params['tag'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api-key', 'partner-key'] # noqa: E501
return self.api_client.call_api(
'/transactionalSMS/statistics/reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTransacSmsReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def send_transac_sms(self, send_transac_sms, **kwargs): # noqa: E501
"""Send the SMS campaign to a mobile number # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.send_transac_sms(send_transac_sms, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SendTransacSms send_transac_sms: Values to send a transactional SMS (required)
:return: SendSms
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.send_transac_sms_with_http_info(send_transac_sms, **kwargs) # noqa: E501
else:
(data) = self.send_transac_sms_with_http_info(send_transac_sms, **kwargs) # noqa: E501
return data
def send_transac_sms_with_http_info(self, send_transac_sms, **kwargs): # noqa: E501
"""Send the SMS campaign to a mobile number # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.send_transac_sms_with_http_info(send_transac_sms, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SendTransacSms send_transac_sms: Values to send a transactional SMS (required)
:return: SendSms
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['send_transac_sms'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method send_transac_sms" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'send_transac_sms' is set
if ('send_transac_sms' not in params or
params['send_transac_sms'] is None):
raise ValueError("Missing the required parameter `send_transac_sms` when calling `send_transac_sms`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'send_transac_sms' in params:
body_params = params['send_transac_sms']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api-key', 'partner-key'] # noqa: E501
return self.api_client.call_api(
'/transactionalSMS/sms', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendSms', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
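Each endpoint above comes as a synchronous wrapper plus a *_with_http_info variant, with async_req switching to a thread-based call. A hedged usage sketch, assuming the standard sib_api_v3_sdk package layout documented by the generator; the API key value is a placeholder:

import sib_api_v3_sdk

configuration = sib_api_v3_sdk.Configuration()
configuration.api_key['api-key'] = 'YOUR-API-KEY'  # placeholder, not a real key

api = sib_api_v3_sdk.TransactionalSMSApi(sib_api_v3_sdk.ApiClient(configuration))

# Synchronous call (the default): returns the GetTransacSmsReport data directly.
report = api.get_transac_sms_report(days=7)

# Asynchronous call: returns a thread whose .get() yields the same result.
thread = api.get_transac_sms_report(days=7, async_req=True)
report = thread.get()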
| 44.221505
| 856
| 0.62666
| 2,506
| 20,563
| 4.927773
| 0.108939
| 0.044052
| 0.033039
| 0.023322
| 0.869301
| 0.856102
| 0.845251
| 0.834076
| 0.824844
| 0.823872
| 0
| 0.017118
| 0.281233
| 20,563
| 464
| 857
| 44.31681
| 0.818403
| 0.405388
| 0
| 0.754098
| 0
| 0.004098
| 0.19133
| 0.043261
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036885
| false
| 0
| 0.016393
| 0
| 0.106557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 8104799d73593a06dbcabbed42b5946482338e79
| 8,681
| py
| Python
| tests/unit/language/ast/test_input_value_definition.py
| matt-koevort/tartiflette
| 5777866b133d846ce4f8aa03f735fa81832896cd
| ["MIT"]
| 530
| 2019-06-04T11:45:36.000Z
| 2022-03-31T09:29:56.000Z
| tests/unit/language/ast/test_input_value_definition.py
| matt-koevort/tartiflette
| 5777866b133d846ce4f8aa03f735fa81832896cd
| ["MIT"]
| 242
| 2019-06-04T11:53:08.000Z
| 2022-03-28T07:06:27.000Z
| tests/unit/language/ast/test_input_value_definition.py
| matt-koevort/tartiflette
| 5777866b133d846ce4f8aa03f735fa81832896cd
| ["MIT"]
| 36
| 2019-06-21T06:40:27.000Z
| 2021-11-04T13:11:16.000Z
|
import pytest
from tartiflette.language.ast import InputValueDefinitionNode
def test_inputvaluedefinitionnode__init__():
input_value_definition_node = InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
)
assert input_value_definition_node.name == "inputValueDefinitionName"
assert input_value_definition_node.type == "inputValueDefinitionType"
assert (
input_value_definition_node.description
== "inputValueDefinitionDescription"
)
assert (
input_value_definition_node.default_value
== "inputValueDefinitionDefaultValue"
)
assert (
input_value_definition_node.directives
== "inputValueDefinitionDirectives"
)
assert (
input_value_definition_node.location == "inputValueDefinitionLocation"
)
@pytest.mark.parametrize(
"input_value_definition_node,other,expected",
[
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
Ellipsis,
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionNameBis",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionTypeBis",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescriptionBis",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValueBis",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectivesBis",
location="inputValueDefinitionLocation",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocationBis",
),
False,
),
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
True,
),
],
)
def test_inputvaluedefinitionnode__eq__(
input_value_definition_node, other, expected
):
assert (input_value_definition_node == other) is expected
@pytest.mark.parametrize(
"input_value_definition_node,expected",
[
(
InputValueDefinitionNode(
name="inputValueDefinitionName",
type="inputValueDefinitionType",
description="inputValueDefinitionDescription",
default_value="inputValueDefinitionDefaultValue",
directives="inputValueDefinitionDirectives",
location="inputValueDefinitionLocation",
),
"InputValueDefinitionNode("
"description='inputValueDefinitionDescription', "
"name='inputValueDefinitionName', "
"type='inputValueDefinitionType', "
"default_value='inputValueDefinitionDefaultValue', "
"directives='inputValueDefinitionDirectives', "
"location='inputValueDefinitionLocation')",
)
],
)
def test_inputvaluedefinitionnode__repr__(
input_value_definition_node, expected
):
assert input_value_definition_node.__repr__() == expected
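The parametrized cases above repeat the same six keyword arguments with exactly one field varied per case. A hedged sketch (not part of the test suite) of a factory that could collapse that repetition; the BASE_KWARGS and make_node names are hypothetical:

BASE_KWARGS = dict(
    name="inputValueDefinitionName",
    type="inputValueDefinitionType",
    description="inputValueDefinitionDescription",
    default_value="inputValueDefinitionDefaultValue",
    directives="inputValueDefinitionDirectives",
    location="inputValueDefinitionLocation",
)

def make_node(**overrides):
    # Build an InputValueDefinitionNode with one or more fields replaced.
    return InputValueDefinitionNode(**{**BASE_KWARGS, **overrides})

# Example: the "name differs" inequality case becomes a one-liner:
# (make_node(), make_node(name="inputValueDefinitionNameBis"), False)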
| 40.189815
| 78
| 0.61721
| 371
| 8,681
| 14.234501
| 0.115903
| 0.043174
| 0.149972
| 0.173831
| 0.86139
| 0.821814
| 0.800985
| 0.761219
| 0.761219
| 0.761219
| 0
| 0
| 0.31137
| 8,681
| 215
| 79
| 40.376744
| 0.883406
| 0
| 0
| 0.735577
| 0
| 0
| 0.392927
| 0.392351
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.014423
| false
| 0
| 0.009615
| 0
| 0.024038
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 8140b6ac85e42b0003f1bed076e91f931a2caf2f
| 10,062
| py
| Python
| BookClub/tests/views/club_views/test_edit_club.py
| amir-rahim/BookClubSocialNetwork
| b69a07cd33592f700214252a64c7c1c53845625d
| ["MIT"]
| 4
| 2022-02-04T02:11:48.000Z
| 2022-03-12T21:38:01.000Z
| BookClub/tests/views/club_views/test_edit_club.py
| amir-rahim/BookClubSocialNetwork
| b69a07cd33592f700214252a64c7c1c53845625d
| ["MIT"]
| 51
| 2022-02-01T18:56:23.000Z
| 2022-03-31T15:35:37.000Z
| BookClub/tests/views/club_views/test_edit_club.py
| amir-rahim/BookClubSocialNetwork
| b69a07cd33592f700214252a64c7c1c53845625d
| ["MIT"]
| null | null | null
|
"""Unit testing of the Edit Club view"""
from django.contrib.messages import get_messages
from django.test import TestCase, tag
from django.urls import reverse
from BookClub.models.club import Club
from BookClub.models.user import User
from BookClub.tests.helpers import reverse_with_next
@tag('views', 'club', 'edit_club')
class EditClubViewTestCase(TestCase):
"""Testing the Edit Club view"""
fixtures = [
"BookClub/tests/fixtures/default_users.json",
"BookClub/tests/fixtures/default_clubs.json",
"BookClub/tests/fixtures/default_club_owners.json",
"BookClub/tests/fixtures/edit_club_rank_required_helper_fixtures.json"
]
def setUp(self):
super(TestCase, self).setUp()
self.user = User.objects.get(username='johndoe')
self.club = Club.objects.get(pk=1)
self.url = reverse('edit_club', kwargs={'club_url_name': self.club.club_url_name})
self.data = {
'description': 'This is a very cool club that is owned by a certain Johnathan. Reading certain books...',
'tagline': 'Welcome to Johnathan\'s club! We read the best books!!!',
'rules': 'Don\'t be annoying',
'is_private': False,
'created_on': self.club.created_on,
}
def test_edit_club_url(self):
self.assertEqual(self.url, '/club/' + self.club.club_url_name + '/edit/')
def test_post_edit_club_redirects_when_not_logged_in(self):
redirect_url = reverse_with_next('login', self.url)
response = self.client.post(self.url, self.data, follow=True)
self.assertRedirects(response, redirect_url,
status_code=302, target_status_code=200, fetch_redirect_response=True
)
self.assertTemplateUsed(response, 'authentication/login.html')
def test_post_edit_club_when_non_existing_club(self):
self.client.login(username=self.user.username, password='Password123')
bad_url = reverse('edit_club', kwargs={'club_url_name': "NONE"})
redirect_url = reverse('available_clubs')
response = self.client.post(bad_url, self.data, follow=True)
self.assertRedirects(response, redirect_url,
status_code=302, target_status_code=200, fetch_redirect_response=True
)
self.assertTemplateUsed(response, 'clubs/available_clubs.html')
def test_get_edit_club_redirects_when_not_logged_in_invalid_club(self):
url = reverse('edit_club', kwargs={'club_url_name': 'fakeclub'})
redirect_url = reverse_with_next('login', url)
response = self.client.get(url, follow=True)
self.assertRedirects(response, redirect_url,
status_code=302, target_status_code=200, fetch_redirect_response=True
)
self.assertTemplateUsed(response, 'authentication/login.html')
def test_edit_create_club_logged_in_correct_rank(self):
self.client.login(username=self.user.username, password='Password123')
session = self.client.session
session['club_id'] = self.club.id
session.save()
response = self.client.get(self.url)
messages = list(get_messages(response.wsgi_request))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'clubs/edit_club.html')
self.assertEqual(len(messages), 0)
def test_edit_club_logged_in_not_in_club(self):
self.client.login(username='janedoe', password='Password123')
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
self.assertTemplateNotUsed(response, 'edit_club.html')
def test_edit_club_logged_in_member_rank(self):
user = User.objects.get(pk=3)
self.client.login(username=user.username, password='Password123')
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
self.assertTemplateNotUsed(response, 'edit_club.html')
messages = list(get_messages(response.wsgi_request))
self.assertEqual(len(messages), 1)
def test_edit_club_logged_in_moderator_rank(self):
user = User.objects.get(pk=5)
self.client.login(username=user.username, password='Password123')
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
self.assertTemplateNotUsed(response, 'edit_club.html')
messages = list(get_messages(response.wsgi_request))
self.assertEqual(len(messages), 1)
def test_edit_club_post_valid_data_name_change(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['name'] = 'Test club'
response = self.client.post(self.url, self.data)
self.club = Club.objects.get(pk=1)
responseUrl = reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name})
self.assertRedirects(response, expected_url=responseUrl, status_code=302, target_status_code=200)
self.assertEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
self.assertEqual(response.status_code, 302)
def test_edit_club_post_valid_data_description_change(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['description'] = "test description"
response = self.client.post(self.url, self.data)
self.club = Club.objects.get(pk=1)
responseUrl = reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name})
self.assertRedirects(response, expected_url=responseUrl, status_code=302, target_status_code=200)
self.assertEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
self.assertEqual(response.status_code, 302)
def test_edit_club_post_valid_data_rules_change(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['rules'] = "test rules"
response = self.client.post(self.url, self.data)
self.club = Club.objects.get(pk=1)
responseUrl = reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name})
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, expected_url=responseUrl, status_code=302, target_status_code=200)
self.assertEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
def test_edit_club_post_valid_data_tagline_change(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['tagline'] = "tagline test"
response = self.client.post(self.url, self.data)
self.club = Club.objects.get(pk=1)
responseUrl = reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name})
self.assertRedirects(response, expected_url=responseUrl, status_code=302, target_status_code=200)
self.assertEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
self.assertEqual(response.status_code, 302)
def test_edit_club_post_valid_data_is_private_change(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['is_private'] = True
response = self.client.post(self.url, self.data)
self.club = Club.objects.get(pk=1)
responseUrl = reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name})
self.assertRedirects(response, expected_url=responseUrl, status_code=302, target_status_code=200)
self.assertEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
self.assertEqual(response.status_code, 302)
def test_edit_club_post_invalid_data_description(self):
self.client.login(username=self.user.username, password='Password123')
self.created_on_pre_test = self.club.created_on
self.data['description'] = ""
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, 200)
self.club = Club.objects.get(pk=1)
self.assertNotEqual(self.club.description, self.data['description'])
self.assertEqual(self.club.tagline, self.data['tagline'])
self.assertEqual(self.club.rules, self.data['rules'])
self.assertEqual(self.club.is_private, self.data['is_private'])
self.assertEqual(self.club.created_on, self.created_on_pre_test)
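Every test_edit_club_post_valid_data_* method above repeats the same reload/redirect/field-equality block. A hedged sketch of a shared helper that could absorb it; the method name assert_club_updated is hypothetical and assumes the same fixtures and self.data dict used above:

def assert_club_updated(self, response):
    # Reload the club and verify the redirect plus every edited field.
    self.club = Club.objects.get(pk=1)
    response_url = reverse('club_dashboard',
                           kwargs={'club_url_name': self.club.club_url_name})
    self.assertRedirects(response, expected_url=response_url,
                         status_code=302, target_status_code=200)
    for field in ('description', 'tagline', 'rules', 'is_private'):
        self.assertEqual(getattr(self.club, field), self.data[field])
    self.assertEqual(self.club.created_on, self.created_on_pre_test)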
| 53.521277
| 117
| 0.698668
| 1,285
| 10,062
| 5.25214
| 0.10428
| 0.061639
| 0.084457
| 0.098829
| 0.820122
| 0.801156
| 0.774633
| 0.737591
| 0.720551
| 0.703067
| 0
| 0.014945
| 0.182071
| 10,062
| 187
| 118
| 53.807487
| 0.805103
| 0.006062
| 0
| 0.593939
| 0
| 0
| 0.11972
| 0.027628
| 0
| 0
| 0
| 0
| 0.357576
| 1
| 0.090909
| false
| 0.066667
| 0.036364
| 0
| 0.139394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 7
| d4b975608a88bde77bb116c91eed7360b2cce113
| 6,619
| py
| Python
| qdev_transmon_helpers/sequencing/floquet_new.py
| QCoDeS/Qcodes-contrib
| 9f94e166b70aed053f3da97833b3868eb554ffb8
| ["MIT"]
| 2
| 2021-01-25T03:47:34.000Z
| 2021-01-25T03:48:13.000Z
| qdev_transmon_helpers/sequencing/floquet_new.py
| QCoDeS/Qcodes-contrib
| 9f94e166b70aed053f3da97833b3868eb554ffb8
| ["MIT"]
| null | null | null
| qdev_transmon_helpers/sequencing/floquet_new.py
| QCoDeS/Qcodes-contrib
| 9f94e166b70aed053f3da97833b3868eb554ffb8
| ["MIT"]
| 1
| 2017-02-17T09:02:43.000Z
| 2017-02-17T09:02:43.000Z
|
from . import make_readout_wf, get_calibration_dict, \
cos_array, sin_array, flat_array, gaussian_array, \
make_time_varying_sequence
from . import Segment, Waveform, Element
def make_floquet_dur_sequence(start, stop, step, amp=1, floquet_freq=1e6,
channels=[1, 4], form='cos'):
calib_dict = get_calibration_dict()
qubit = calib_dict['current_qubit']
time_after_qubit = (calib_dict['cycle_time'][qubit] -
calib_dict['pulse_end'][qubit])
compensating_wait_segment = Segment(
name='compensating_wait', gen_func=flat_array, func_args={'amp': 0})
if form == 'cos':
floquet_drive = Segment(
name='floquet_drive', gen_func=cos_array,
func_args={'amp': amp, 'freq': floquet_freq})
elif form == 'sin':
floquet_drive = Segment(
name='floquet_drive', gen_func=sin_array,
func_args={'amp': amp, 'freq': floquet_freq})
else:
raise ValueError("unrecognised form: expected 'cos' or 'sin'")
wait_segment = Segment(
name='wait', gen_func=flat_array,
func_args={'amp': 0, 'dur': time_after_qubit})
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, floquet_drive, wait_segment])
readout_wf = make_readout_wf(channel=channels[-1])
floquet_element = Element(sample_rate=calib_dict['sample_rate'][qubit])
floquet_element.add_waveform(qubit_wf)
floquet_element.add_waveform(readout_wf)
marker_points = int(calib_dict['marker_time'][qubit] *
calib_dict['sample_rate'][qubit])
floquet_sequence = make_time_varying_sequence(
floquet_element, channels[0], 1, 'dur', start, stop, step,
0, calib_dict['cycle_time'][qubit],
name='floquet_seq',
variable_name='floquet_drive_dur', variable_unit='s',
readout_ch=channels[-1], marker_points=marker_points)
floquet_sequence.labels = {'seq_type': 'floquet'}
return floquet_sequence
def make_floquet_dur_seq_gated(start, stop, step, amp=1, floquet_freq=1e6,
channels=[1, 4], form='cos',
pi_half_before=True, pi_half_after=False,
gaussian=True, pi_half_after_neg=False):
calib_dict = get_calibration_dict()
qubit = calib_dict['current_qubit']
time_after_qubit = (calib_dict['cycle_time'][qubit] -
calib_dict['pulse_end'][qubit])
pi_half_amp = calib_dict['pi_half_pulse_amp'][qubit]
compensating_wait_segment = Segment(
name='compensating_wait', gen_func=flat_array, func_args={'amp': 0})
if form == 'cos':
floquet_drive = Segment(
name='floquet_drive', gen_func=cos_array,
func_args={'amp': amp, 'freq': floquet_freq})
elif form == 'sin':
floquet_drive = Segment(
name='floquet_drive', gen_func=sin_array,
func_args={'amp': amp, 'freq': floquet_freq})
else:
raise ValueError("unrecognised form: expected 'cos' or 'sin'")
wait_segment = Segment(
name='wait', gen_func=flat_array,
func_args={'amp': 0, 'dur': time_after_qubit})
if gaussian:
pi_half_sigma = calib_dict['pi_pulse_sigma'][qubit]
pi_half_segment = Segment(
name='gaussian_pi_pulse', gen_func=gaussian_array,
func_args={'sigma_cutoff': calib_dict['sigma_cutoff'][qubit],
'amp': pi_half_amp, 'sigma': pi_half_sigma})
pi_half_segment_neg = Segment(
name='gaussian_pi_pulse', gen_func=gaussian_array,
func_args={'sigma_cutoff': calib_dict['sigma_cutoff'][qubit],
'amp': pi_half_amp, 'sigma': pi_half_sigma,
'positive': False})
else:
pi_half_dur = calib_dict['pi_half_pulse_dur'][qubit]
pi_half_segment = Segment(
name='square_pi_pulse', gen_func=flat_array,
func_args={'amp': pi_half_amp, 'dur': pi_half_dur})
pi_half_segment_neg = Segment(
name='square_pi_pulse', gen_func=flat_array,
func_args={'amp': -1 * pi_half_amp, 'dur': pi_half_dur})
if pi_half_before and pi_half_after and not pi_half_after_neg:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, pi_half_segment,
floquet_drive, pi_half_segment, wait_segment])
floquet_seg_index = 2
elif pi_half_before and pi_half_after:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, pi_half_segment,
floquet_drive, pi_half_segment_neg, wait_segment])
floquet_seg_index = 2
elif pi_half_before:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, pi_half_segment,
floquet_drive, wait_segment])
floquet_seg_index = 2
elif pi_half_after and not pi_half_after_neg:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, floquet_drive,
pi_half_segment, wait_segment])
floquet_seg_index = 1
elif pi_half_after:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, floquet_drive,
pi_half_segment_neg, wait_segment])
floquet_seg_index = 1
else:
qubit_wf = Waveform(
channel=channels[0],
segment_list=[compensating_wait_segment, floquet_drive,
wait_segment])
floquet_seg_index = 1
readout_wf = make_readout_wf(channel=channels[-1])
floquet_element = Element(sample_rate=calib_dict['sample_rate'][qubit])
floquet_element.add_waveform(qubit_wf)
floquet_element.add_waveform(readout_wf)
marker_points = int(calib_dict['marker_time'][qubit] *
calib_dict['sample_rate'][qubit])
floquet_sequence = make_time_varying_sequence(
floquet_element, channels[0], floquet_seg_index, 'dur',
start, stop, step,
0, calib_dict['cycle_time'][qubit],
name='floquet_seq',
variable_name='floquet_drive_dur', variable_unit='s',
readout_ch=channels[-1], marker_points=marker_points)
floquet_sequence.labels = {
'seq_type': 'floquet', 'pi_half_before': pi_half_before,
'pi_half_after': pi_half_after}
return floquet_sequence
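A hedged usage sketch for the sequence builders above, assuming a calibration dictionary has already been set up via get_calibration_dict(); the numeric values are illustrative, not from the source:

# Sweep the Floquet drive duration from 0 to 1 microsecond in 10 ns steps,
# driving the qubit on channel 1 and the readout on channel 4.
seq = make_floquet_dur_sequence(
    start=0, stop=1e-6, step=10e-9,
    amp=0.5, floquet_freq=2e6,
    channels=[1, 4], form='sin')
print(seq.labels)  # {'seq_type': 'floquet'}, as set above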
| 43.261438
| 78
| 0.635292
| 817
| 6,619
| 4.73317
| 0.107711
| 0.062064
| 0.040341
| 0.041376
| 0.892682
| 0.87458
| 0.856219
| 0.840703
| 0.836824
| 0.828808
| 0
| 0.007556
| 0.26016
| 6,619
| 152
| 79
| 43.546053
| 0.782112
| 0
| 0
| 0.724638
| 0
| 0
| 0.106209
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014493
| false
| 0
| 0.014493
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| d4cc3e4bd2267a7174717f08aa5d528215255ca4
| 16,099
| py
| Python
| plugins/proofpoint_tap/komand_proofpoint_tap/actions/get_top_clickers/schema.py
| lukaszlaszuk/insightconnect-plugins
| 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
| ["MIT"]
| 46
| 2019-06-05T20:47:58.000Z
| 2022-03-29T10:18:01.000Z
| plugins/proofpoint_tap/komand_proofpoint_tap/actions/get_top_clickers/schema.py
| lukaszlaszuk/insightconnect-plugins
| 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
| ["MIT"]
| 386
| 2019-06-07T20:20:39.000Z
| 2022-03-30T17:35:01.000Z
| plugins/proofpoint_tap/komand_proofpoint_tap/actions/get_top_clickers/schema.py
| lukaszlaszuk/insightconnect-plugins
| 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
| ["MIT"]
| 43
| 2019-07-09T14:13:58.000Z
| 2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Component:
DESCRIPTION = "Fetch the identities and attack index of the top clickers within your organization for a given period"
class Input:
WINDOW = "window"
class Output:
RESULTS = "results"
class GetTopClickersInput(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"window": {
"type": "integer",
"title": "Window",
"description": "An integer indicating how many days the data should be retrieved for",
"enum": [
14,
30,
90
],
"order": 1
}
},
"required": [
"window"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class GetTopClickersOutput(insightconnect_plugin_runtime.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"results": {
"$ref": "#/definitions/top_clickers",
"title": "Results",
"description": "The results containing top clickers",
"order": 1
}
},
"required": [
"results"
],
"definitions": {
"click_statistics": {
"type": "object",
"title": "click_statistics",
"properties": {
"clickCount": {
"type": "integer",
"title": "Click Count",
"description": "Click count",
"order": 1
},
"families": {
"type": "array",
"title": "Families",
"description": "Families",
"items": {
"$ref": "#/definitions/families"
},
"order": 2
}
},
"definitions": {
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
}
}
},
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
},
"identity": {
"type": "object",
"title": "identity",
"properties": {
"customerUserId": {
"type": "string",
"title": "Customer User ID",
"description": "Customer user ID",
"order": 2
},
"department": {
"type": "string",
"title": "Department",
"description": "Department",
"order": 5
},
"emails": {
"type": "array",
"title": "Emails",
"description": "Emails",
"items": {
"type": "string"
},
"order": 3
},
"guid": {
"type": "string",
"title": "GUID",
"description": "GUID",
"order": 1
},
"location": {
"type": "string",
"title": "Location",
"description": "Location",
"order": 6
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 4
},
"title": {
"type": "string",
"title": "Title",
"description": "Title",
"order": 7
},
"vip": {
"type": "boolean",
"title": "VIP",
"description": "VIP",
"order": 8
}
}
},
"top_clickers": {
"type": "object",
"title": "top_clickers",
"properties": {
"interval": {
"type": "string",
"title": "Interval",
"description": "An ISO8601-formatted interval showing what time the response was calculated for",
"order": 3
},
"totalTopClickers": {
"type": "integer",
"title": "Total Top Clickers",
"description": "An integer describing the total number of top clickers in the time interval",
"order": 2
},
"users": {
"type": "array",
"title": "Users",
"description": "An array of user objects that contain information about the user's identity and statistics of the clicking behavior",
"items": {
"$ref": "#/definitions/user"
},
"order": 1
}
},
"definitions": {
"click_statistics": {
"type": "object",
"title": "click_statistics",
"properties": {
"clickCount": {
"type": "integer",
"title": "Click Count",
"description": "Click count",
"order": 1
},
"families": {
"type": "array",
"title": "Families",
"description": "Families",
"items": {
"$ref": "#/definitions/families"
},
"order": 2
}
},
"definitions": {
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
}
}
},
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
},
"identity": {
"type": "object",
"title": "identity",
"properties": {
"customerUserId": {
"type": "string",
"title": "Customer User ID",
"description": "Customer user ID",
"order": 2
},
"department": {
"type": "string",
"title": "Department",
"description": "Department",
"order": 5
},
"emails": {
"type": "array",
"title": "Emails",
"description": "Emails",
"items": {
"type": "string"
},
"order": 3
},
"guid": {
"type": "string",
"title": "GUID",
"description": "GUID",
"order": 1
},
"location": {
"type": "string",
"title": "Location",
"description": "Location",
"order": 6
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 4
},
"title": {
"type": "string",
"title": "Title",
"description": "Title",
"order": 7
},
"vip": {
"type": "boolean",
"title": "VIP",
"description": "VIP",
"order": 8
}
}
},
"user": {
"type": "object",
"title": "user",
"properties": {
"clickStatistics": {
"$ref": "#/definitions/click_statistics",
"title": "Click Statistics",
"description": "Click statistics",
"order": 2
},
"identity": {
"$ref": "#/definitions/identity",
"title": "Identity",
"description": "Identity",
"order": 1
}
},
"definitions": {
"click_statistics": {
"type": "object",
"title": "click_statistics",
"properties": {
"clickCount": {
"type": "integer",
"title": "Click Count",
"description": "Click count",
"order": 1
},
"families": {
"type": "array",
"title": "Families",
"description": "Families",
"items": {
"$ref": "#/definitions/families"
},
"order": 2
}
},
"definitions": {
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
}
}
},
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
},
"identity": {
"type": "object",
"title": "identity",
"properties": {
"customerUserId": {
"type": "string",
"title": "Customer User ID",
"description": "Customer user ID",
"order": 2
},
"department": {
"type": "string",
"title": "Department",
"description": "Department",
"order": 5
},
"emails": {
"type": "array",
"title": "Emails",
"description": "Emails",
"items": {
"type": "string"
},
"order": 3
},
"guid": {
"type": "string",
"title": "GUID",
"description": "GUID",
"order": 1
},
"location": {
"type": "string",
"title": "Location",
"description": "Location",
"order": 6
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 4
},
"title": {
"type": "string",
"title": "Title",
"description": "Title",
"order": 7
},
"vip": {
"type": "boolean",
"title": "VIP",
"description": "VIP",
"order": 8
}
}
}
}
}
}
},
"user": {
"type": "object",
"title": "user",
"properties": {
"clickStatistics": {
"$ref": "#/definitions/click_statistics",
"title": "Click Statistics",
"description": "Click statistics",
"order": 2
},
"identity": {
"$ref": "#/definitions/identity",
"title": "Identity",
"description": "Identity",
"order": 1
}
},
"definitions": {
"click_statistics": {
"type": "object",
"title": "click_statistics",
"properties": {
"clickCount": {
"type": "integer",
"title": "Click Count",
"description": "Click count",
"order": 1
},
"families": {
"type": "array",
"title": "Families",
"description": "Families",
"items": {
"$ref": "#/definitions/families"
},
"order": 2
}
},
"definitions": {
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
}
}
},
"families": {
"type": "object",
"title": "families",
"properties": {
"clicks": {
"type": "integer",
"title": "Clicks",
"description": "Clicks",
"order": 2
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 1
}
}
},
"identity": {
"type": "object",
"title": "identity",
"properties": {
"customerUserId": {
"type": "string",
"title": "Customer User ID",
"description": "Customer user ID",
"order": 2
},
"department": {
"type": "string",
"title": "Department",
"description": "Department",
"order": 5
},
"emails": {
"type": "array",
"title": "Emails",
"description": "Emails",
"items": {
"type": "string"
},
"order": 3
},
"guid": {
"type": "string",
"title": "GUID",
"description": "GUID",
"order": 1
},
"location": {
"type": "string",
"title": "Location",
"description": "Location",
"order": 6
},
"name": {
"type": "string",
"title": "Name",
"description": "Name",
"order": 4
},
"title": {
"type": "string",
"title": "Title",
"description": "Title",
"order": 7
},
"vip": {
"type": "boolean",
"title": "VIP",
"description": "VIP",
"order": 8
}
}
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
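The Input/Output classes above are thin wrappers around plain JSON Schema documents, so their constraints can be checked directly. A hedged sketch using the standalone jsonschema package rather than the plugin runtime:

import jsonschema

# 14 is in the enum [14, 30, 90] declared above, so this passes silently.
jsonschema.validate(instance={"window": 14},
                    schema=GetTopClickersInput.schema)

try:
    # 7 is not in the enum, so validation raises.
    jsonschema.validate(instance={"window": 7},
                        schema=GetTopClickersInput.schema)
except jsonschema.ValidationError:
    print("window must be one of 14, 30, 90")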
| 26.742525
| 143
| 0.342941
| 940
| 16,099
| 5.82766
| 0.12766
| 0.067543
| 0.090361
| 0.041621
| 0.813436
| 0.813436
| 0.813436
| 0.813436
| 0.795546
| 0.795546
| 0
| 0.009376
| 0.503137
| 16,099
| 601
| 144
| 26.787022
| 0.675459
| 0.002298
| 0
| 0.72449
| 1
| 0.001701
| 0.967248
| 0.015006
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003401
| false
| 0
| 0.003401
| 0
| 0.02381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| d4f9ddc058af306810db4dcdfde19c5ec9b9669c
| 24,540
| py
| Python
| tests/unit/io/DictTest.py
| halotudio/openPNM-copy2
| d400ec65e9421256a531f6d22a38255b002d5dcb
| ["MIT"]
| 1
| 2021-05-01T11:10:43.000Z
| 2021-05-01T11:10:43.000Z
| tests/unit/io/DictTest.py
| halotudio/openPNM-copy2
| d400ec65e9421256a531f6d22a38255b002d5dcb
| ["MIT"]
| null | null | null
| tests/unit/io/DictTest.py
| halotudio/openPNM-copy2
| d400ec65e9421256a531f6d22a38255b002d5dcb
| ["MIT"]
| null | null | null
|
import openpnm as op
from openpnm.io import Dict
import py
import os
class DictTest:
def setup_class(self):
ws = op.Workspace()
ws.settings['local_data'] = True
self.net = op.network.Cubic(shape=[2, 2, 2])
Ps = [0, 1, 2, 3]
Ts = self.net.find_neighbor_throats(pores=Ps)
self.geo_1 = op.geometry.GenericGeometry(network=self.net,
pores=Ps, throats=Ts)
self.geo_1['pore.boo'] = 1
self.geo_1['throat.boo'] = 1
Ps = [4, 5, 6, 7]
Ts = self.net.find_neighbor_throats(pores=Ps, mode='xnor')
self.geo_2 = op.geometry.GenericGeometry(network=self.net,
pores=Ps, throats=Ts)
self.geo_2['pore.boo'] = 1
self.geo_2['throat.boo'] = 1
self.phase_1 = op.phases.GenericPhase(network=self.net)
self.phase_1['pore.bar'] = 2
self.phase_1['throat.bar'] = 2
self.phase_2 = op.phases.GenericPhase(network=self.net)
self.phase_2['pore.bar'] = 2
self.phase_2['throat.bar'] = 2
self.phys_1 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_1,
geometry=self.geo_1)
self.phys_1['pore.baz'] = 11
self.phys_1['throat.baz'] = 11
self.phys_2 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_1,
geometry=self.geo_2)
self.phys_2['pore.baz'] = 12
self.phys_2['throat.baz'] = 12
self.phys_3 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_2,
geometry=self.geo_1)
self.phys_3['pore.baz'] = 21
self.phys_3['throat.baz'] = 21
self.phys_4 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_2,
geometry=self.geo_2)
self.phys_4['pore.baz'] = 22
self.phys_4['throat.baz'] = 22
def teardown_class(self):
ws = op.Workspace()
ws.clear()
def test_to_dict_missing_all_physics(self):
net = op.network.Cubic(shape=[4, 4, 4])
op.geometry.GenericGeometry(network=net, pores=net.Ps, throats=net.Ts)
phase = op.phases.GenericPhase(network=net)
Dict.to_dict(network=net, phases=[phase], flatten=True,
interleave=True, categorize_by=[])
def test_to_dict_flattened_interleaved(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=True, interleave=True, categorize_by=[])
a = set(['net_01', 'phase_01', 'phase_02'])
assert a == set(D.keys())
assert set(['geo_01', 'geo_02']).isdisjoint(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).isdisjoint(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).isdisjoint(D['phase_02'].keys())
def test_to_dict_flattened_not_interleaved(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=True, interleave=False, categorize_by=[])
a = set([i.name for i in self.net.project])
assert a == set(D.keys())
assert set(['geo_01', 'geo_02']).isdisjoint(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).isdisjoint(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).isdisjoint(D['phase_02'].keys())
def test_to_dict_not_flattened_interleaved(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=True, categorize_by=[])
a = set(['net_01', 'phase_01', 'phase_02'])
assert a == set(D.keys())
assert set(['geo_01', 'geo_02']).isdisjoint(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).isdisjoint(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).isdisjoint(D['phase_02'].keys())
def test_to_dict_not_flattened_not_interleaved(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False, categorize_by=[])
_ = set(['network', 'phase', 'physics', 'geometry'])
b = set(['net_01', 'phase_01', 'phase_02'])
_ = set(['labels', 'properties'])
_ = set(['pore', 'throat'])
# Ensure NOT categorized by object
assert b == set(D.keys())
# Ensure NOT flattened
assert set(['geo_01', 'geo_02']).issubset(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).issubset(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).issubset(D['phase_02'].keys())
# Ensure no cross talk between phases
assert set(['phys_01', 'phys_02']).isdisjoint(D['phase_02'].keys())
assert set(['phys_03', 'phys_04']).isdisjoint(D['phase_01'].keys())
def test_to_dict_not_flat_not_interleaved_categorized_by_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['object'])
e = set(['network', 'phase'])
# Ensure categorized by object
assert e == set(D.keys())
        # Ensure output was flattened, which occurs when categorizing by object
keys = D['network']['net_01']['geometry'].keys()
assert set(['geo_01', 'geo_02']).issubset(keys)
keys = D['phase'].keys()
assert set(['phase_01', 'phase_02']).issubset(keys)
keys = D['phase']['phase_01']['physics'].keys()
assert set(['phys_01', 'phys_02']).issubset(keys)
def test_to_dict_not_flat_not_interleaved_categorized_by_data(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['data'])
        b = set(['net_01', 'phase_01', 'phase_02'])
        c = set(['labels', 'properties'])
# Ensure NOT categorized by object
assert b == set(D.keys())
# Ensure NOT flattened
assert set(['geo_01', 'geo_02']).issubset(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).issubset(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).issubset(D['phase_02'].keys())
# Ensure categorized by data
assert c.issubset(D['net_01'].keys())
assert c.issubset(D['phase_01'].keys())
assert c.issubset(D['phase_02'].keys())
assert c.issubset(D['net_01']['geo_01'].keys())
assert c.issubset(D['net_01']['geo_02'].keys())
assert c.issubset(D['phase_01']['phys_01'].keys())
assert c.issubset(D['phase_01']['phys_02'].keys())
assert c.issubset(D['phase_02']['phys_03'].keys())
assert c.issubset(D['phase_02']['phys_04'].keys())
def test_to_dict_not_flat_not_interleaved_categorized_by_element(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['element'])
        b = set(['net_01', 'phase_01', 'phase_02'])
        d = set(['pore', 'throat'])
# Ensure NOT categorized by object
assert b == set(D.keys())
# Ensure NOT flattened
assert set(['geo_01', 'geo_02']).issubset(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).issubset(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).issubset(D['phase_02'].keys())
        # Ensure it's categorized by element
        assert d.issubset(D['net_01'].keys())
        assert d.issubset(D['phase_01'].keys())
        assert d.issubset(D['phase_02'].keys())
        assert d.issubset(D['net_01']['geo_01'].keys())
        assert d.issubset(D['net_01']['geo_02'].keys())
        assert d.issubset(D['phase_01']['phys_01'].keys())
        assert d.issubset(D['phase_01']['phys_02'].keys())
        assert d.issubset(D['phase_02']['phys_03'].keys())
        assert d.issubset(D['phase_02']['phys_04'].keys())
def test_to_dict_not_flat_not_interleaved_cat_by_element_data(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['element', 'data'])
        b = set(['net_01', 'phase_01', 'phase_02'])
        d = set(['pore', 'throat'])
# Ensure NOT categorized by object
assert b == set(D.keys())
# Ensure NOT flattened
assert set(['geo_01', 'geo_02']).issubset(D['net_01'].keys())
assert set(['phys_01', 'phys_02']).issubset(D['phase_01'].keys())
assert set(['phys_03', 'phys_04']).issubset(D['phase_02'].keys())
# Ensure categorized by data and element
assert d.issubset(D['net_01']['properties'].keys())
assert d.issubset(D['net_01']['labels'].keys())
assert d.issubset(D['phase_01']['properties'].keys())
assert d.issubset(D['phase_01']['labels'].keys())
assert d.issubset(D['phase_02']['properties'].keys())
assert d.issubset(D['phase_02']['labels'].keys())
assert d.issubset(D['net_01']['geo_01']['properties'].keys())
assert d.issubset(D['net_01']['geo_01']['labels'].keys())
assert d.issubset(D['net_01']['geo_02']['properties'].keys())
assert d.issubset(D['net_01']['geo_02']['labels'].keys())
assert d.issubset(D['phase_01']['phys_01']['properties'].keys())
assert d.issubset(D['phase_01']['phys_01']['labels'].keys())
assert d.issubset(D['phase_01']['phys_02']['properties'].keys())
assert d.issubset(D['phase_01']['phys_02']['labels'].keys())
assert d.issubset(D['phase_02']['phys_03']['properties'].keys())
assert d.issubset(D['phase_02']['phys_03']['labels'].keys())
assert d.issubset(D['phase_02']['phys_04']['properties'].keys())
assert d.issubset(D['phase_02']['phys_04']['labels'].keys())
def test_to_dict_not_flat_not_interleaved_cat_by_element_data_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['element', 'data', 'object'])
d = set(['pore', 'throat'])
e = set(['network', 'phase'])
# Check if categorized by object, but not flattened
assert e == set(D.keys())
assert 'geometry' in D['network']['net_01'].keys()
assert 'physics' in D['phase']['phase_01'].keys()
assert 'physics' in D['phase']['phase_02'].keys()
# Ensure it's categorized by object, data, and element
assert d.issubset(D['network']['net_01']['labels'].keys())
assert d.issubset(D['phase']['phase_01']['properties'].keys())
assert d.issubset(D['phase']['phase_01']['labels'].keys())
assert d.issubset(D['phase']['phase_02']['properties'].keys())
assert d.issubset(D['phase']['phase_02']['labels'].keys())
path = D['network']['net_01']['geometry']['geo_01']['properties']
assert d.issubset(path.keys())
path = D['network']['net_01']['geometry']['geo_01']['labels']
assert d.issubset(path.keys())
path = D['network']['net_01']['geometry']['geo_02']['properties']
assert d.issubset(path.keys())
path = D['network']['net_01']['geometry']['geo_02']['labels']
assert d.issubset(path.keys())
path = D['phase']['phase_01']['physics']['phys_01']['properties']
assert d.issubset(path.keys())
path = D['phase']['phase_01']['physics']['phys_01']['labels']
assert d.issubset(path.keys())
path = D['phase']['phase_01']['physics']['phys_02']['properties']
assert d.issubset(path.keys())
path = D['phase']['phase_01']['physics']['phys_02']['labels']
assert d.issubset(path.keys())
path = D['phase']['phase_02']['physics']['phys_03']['properties']
assert d.issubset(path.keys())
path = D['phase']['phase_02']['physics']['phys_03']['labels']
assert d.issubset(path.keys())
path = D['phase']['phase_02']['physics']['phys_04']['properties']
assert d.issubset(path.keys())
path = D['phase']['phase_02']['physics']['phys_04']['labels']
assert d.issubset(path.keys())
def test_to_dict_not_flat_not_interleaved_cat_by_element_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=False,
categorize_by=['element', 'object'])
d = set(['pore', 'throat'])
e = set(['network', 'phase'])
        # Check if categorized by object, but not flattened
        assert e == set(D.keys())
assert 'geometry' in D['network']['net_01'].keys()
assert 'physics' in D['phase']['phase_01'].keys()
assert 'physics' in D['phase']['phase_02'].keys()
        # Ensure it's categorized by element
        assert d.issubset(D['network']['net_01'].keys())
        assert d.issubset(D['phase']['phase_01'].keys())
        assert d.issubset(D['phase']['phase_02'].keys())
        assert d.issubset(D['network']['net_01']['geometry']['geo_01'].keys())
        assert d.issubset(D['network']['net_01']['geometry']['geo_02'].keys())
        assert d.issubset(D['phase']['phase_01']['physics']['phys_01'].keys())
        assert d.issubset(D['phase']['phase_01']['physics']['phys_02'].keys())
        assert d.issubset(D['phase']['phase_02']['physics']['phys_03'].keys())
        assert d.issubset(D['phase']['phase_02']['physics']['phys_04'].keys())
def test_to_dict_flat_not_interleaved_categorized_by_element(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=True, interleave=False,
categorize_by=['element'])
assert set(D.keys()) == set([i.name for i in self.net.project])
d = set(['pore', 'throat'])
assert d.issubset(D['net_01'].keys())
assert d.issubset(D['geo_01'].keys())
assert d.issubset(D['geo_02'].keys())
assert d.issubset(D['phase_01'].keys())
assert d.issubset(D['phase_02'].keys())
assert d.issubset(D['phys_01'].keys())
assert d.issubset(D['phys_02'].keys())
assert d.issubset(D['phys_03'].keys())
assert d.issubset(D['phys_04'].keys())
def test_to_dict_flat_not_interleaved_categorized_by_data(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=True, interleave=False,
categorize_by=['data'])
assert set(D.keys()) == set([i.name for i in self.net.project])
c = set(['labels', 'properties'])
assert c.issubset(D['net_01'].keys())
assert c.issubset(D['geo_01'].keys())
assert c.issubset(D['geo_02'].keys())
assert c.issubset(D['phase_01'].keys())
assert c.issubset(D['phase_02'].keys())
assert c.issubset(D['phys_01'].keys())
assert c.issubset(D['phys_02'].keys())
assert c.issubset(D['phys_03'].keys())
assert c.issubset(D['phys_04'].keys())
def test_to_dict_flat_not_interleaved_categorized_by_data_element(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=True, interleave=False,
categorize_by=['data', 'element'])
assert set(D.keys()) == set([i.name for i in self.net.project])
d = set(['pore', 'throat'])
assert d.issubset(D['net_01']['labels'].keys())
assert d.issubset(D['net_01']['properties'].keys())
assert d.issubset(D['geo_01']['labels'].keys())
assert d.issubset(D['geo_01']['properties'].keys())
assert d.issubset(D['geo_02']['labels'].keys())
assert d.issubset(D['geo_02']['properties'].keys())
assert d.issubset(D['phase_01']['labels'].keys())
assert d.issubset(D['phase_01']['properties'].keys())
assert d.issubset(D['phase_02']['labels'].keys())
assert d.issubset(D['phase_02']['properties'].keys())
assert d.issubset(D['phys_01']['labels'].keys())
assert d.issubset(D['phys_01']['properties'].keys())
assert d.issubset(D['phys_02']['labels'].keys())
assert d.issubset(D['phys_02']['properties'].keys())
assert d.issubset(D['phys_03']['labels'].keys())
assert d.issubset(D['phys_03']['properties'].keys())
assert d.issubset(D['phys_04']['labels'].keys())
assert d.issubset(D['phys_04']['properties'].keys())
def test_to_dict_interleaved_categorized_by_element(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=True,
categorize_by=['element'])
b = set(['net_01', 'phase_01', 'phase_02'])
assert set(D.keys()) == b
d = set(['pore', 'throat'])
assert d.issubset(D['net_01'].keys())
assert d.issubset(D['phase_01'].keys())
assert d.issubset(D['phase_02'].keys())
def test_to_dict_interleaved_categorized_by_data(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=True,
categorize_by=['data'])
b = set(['net_01', 'phase_01', 'phase_02'])
assert set(D.keys()) == b
d = set(['labels', 'properties'])
assert d.issubset(D['net_01'].keys())
assert d.issubset(D['phase_01'].keys())
assert d.issubset(D['phase_02'].keys())
def test_to_dict_interleaved_categorized_by_data_element(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=True,
categorize_by=['data', 'element'])
b = set(['net_01', 'phase_01', 'phase_02'])
assert set(D.keys()) == b
d = set(['pore', 'throat'])
assert d.issubset(D['net_01']['labels'].keys())
assert d.issubset(D['net_01']['properties'].keys())
assert d.issubset(D['phase_01']['labels'].keys())
assert d.issubset(D['phase_01']['properties'].keys())
assert d.issubset(D['phase_02']['labels'].keys())
assert d.issubset(D['phase_02']['properties'].keys())
def test_to_dict_categorize_by_project(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1, self.phase_2],
flatten=False, interleave=True,
categorize_by=['project'])
assert 'proj_01' in D.keys()
def test_from_dict_interleaved_categorized_by_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=True,
categorize_by=['object'])
proj = Dict.from_dict(D)
assert len(proj) == 2
assert len(proj.geometries().values()) == 0
assert len(proj.phases().values()) == 1
assert len(proj.physics().values()) == 0
def test_from_dict_interleaved_not_categorized(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=True,
categorize_by=[])
proj = Dict.from_dict(D)
assert len(proj) == 2
assert len(proj.geometries().values()) == 0
assert len(proj.phases().values()) == 0
assert len(proj.physics().values()) == 0
def test_from_dict_not_interleaved_flatted_categorized_by_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=True, interleave=False,
categorize_by=['object'])
proj = Dict.from_dict(D)
assert len(proj) == 6
assert len(proj.geometries().values()) == 2
assert len(proj.phases().values()) == 1
assert len(proj.physics().values()) == 2
def test_from_dict_not_interleaved_not_flatted_categorized_by_object(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=False,
categorize_by=['object'])
proj = Dict.from_dict(D)
assert len(proj) == 6
assert len(proj.geometries().values()) == 2
assert len(proj.phases().values()) == 1
assert len(proj.physics().values()) == 2
def test_from_dict_not_interleaved_not_flatted_cat_by_obj_data_elem(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=False,
categorize_by=['object', 'element', 'data'])
# Ensure that data and element categorizations are ripped out
proj = Dict.from_dict(D)
assert len(proj) == 6
assert len(proj.geometries().values()) == 2
assert len(proj.phases().values()) == 1
assert len(proj.physics().values()) == 2
def test_from_dict_not_interleaved_not_flatted_not_categorized(self):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=False,
categorize_by=[])
proj = Dict.from_dict(D)
assert len(proj) == 6
assert len(proj.geometries().values()) == 0
assert len(proj.phases().values()) == 0
assert len(proj.physics().values()) == 0
def test_save_and_load(self, tmpdir):
D = Dict.to_dict(network=self.net, phases=[self.phase_1],
flatten=False, interleave=False,
categorize_by=[])
fname = tmpdir.join('test.dct')
Dict.save(dct=D, filename=fname)
dct = Dict.load(filename=fname)
assert len(dct.keys()) == 2
os.remove(fname)
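# A minimal round-trip sketch of the pattern exercised above (object names
# follow those created in setup_class; purely illustrative):
#
#     D = Dict.to_dict(network=net, phases=[phase_1],
#                      flatten=True, interleave=False, categorize_by=[])
#     proj = Dict.from_dict(D)  # rebuilds an OpenPNM project from the dict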
if __name__ == '__main__':
t = DictTest()
self = t
t.setup_class()
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
try:
t.__getattribute__(item)()
except TypeError:
t.__getattribute__(item)(tmpdir=py.path.local())
| (trailing numeric quality-signal columns of the preceding record omitted)
hexsha: be054318dac117a82b9f91add8eb647fc88ecf5c | size: 82,071 | ext: py | lang: Python
repo path: velo_payments/api/payees_api.py | repo: velopaymentsapi/velo-python
repo head hexsha: 59b39555e9714139b4bf697151cc7d15f6dd510e | licenses: ["Apache-2.0"]
# coding: utf-8
"""
Velo Payments APIs
    ## Terms and Definitions Throughout this document and the Velo platform the following terms are used: * **Payor.** An entity (typically a corporation) which wishes to pay funds to one or more payees via a payout. * **Payee.** The recipient of funds paid out by a payor. * **Payment.** A single transfer of funds from a payor to a payee. * **Payout.** A batch of Payments, typically used by a payor to logically group payments (e.g. by business day). Technically there need be no relationship between the payments in a payout - a single payout can contain payments to multiple payees and/or multiple payments to a single payee. * **Sandbox.** An integration environment provided by Velo Payments which offers a similar API experience to the production environment, but all funding and payment events are simulated, along with many other services such as OFAC sanctions list checking. ## Overview The Velo Payments API allows a payor to perform a number of operations. The following is a list of the main capabilities in a natural order of execution: * Authenticate with the Velo platform * Maintain a collection of payees * Query the payor’s current balance of funds within the platform and perform additional funding * Issue payments to payees * Query the platform for a history of those payments This document describes the main concepts and APIs required to get up and running with the Velo Payments platform. It is not an exhaustive API reference. For that, please see the separate Velo Payments API Reference. ## API Considerations The Velo Payments API is REST based and uses the JSON format for requests and responses. Most calls are secured using OAuth 2 security and require a valid authentication access token for successful operation. See the Authentication section for details. Where a dynamic value is required in the examples below, the {token} format is used, suggesting that the caller needs to supply the appropriate value of the token in question (without including the { or } characters). Where curl examples are given, the -d @filename.json approach is used, indicating that the request body should be placed into a file named filename.json in the current directory. Each of the curl examples in this document should be considered a single line on the command-line, regardless of how they appear in print. ## Authenticating with the Velo Platform Once Velo backoffice staff have added your organization as a payor within the Velo platform sandbox, they will create a payor Id, an API key and an API secret for you and share these with you in a secure manner. You will need to use these values to authenticate with the Velo platform in order to gain access to the APIs. The steps to take are explained in the following: create a string comprising the API key (e.g. 44a9537d-d55d-4b47-8082-14061c2bcdd8) and API secret (e.g. c396b26b-137a-44fd-87f5-34631f8fd529) with a colon between them. E.g. 44a9537d-d55d-4b47-8082-14061c2bcdd8:c396b26b-137a-44fd-87f5-34631f8fd529 base64 encode this string. E.g.: NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ== create an HTTP **Authorization** header with the value set to e.g. Basic NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ== perform the Velo authentication REST call using the HTTP header created above, e.g.
via curl: ``` curl -X POST \\ -H \"Content-Type: application/json\" \\ -H \"Authorization: Basic NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ==\" \\ 'https://api.sandbox.velopayments.com/v1/authenticate?grant_type=client_credentials' ``` If successful, this call will result in a **200** HTTP status code and a response body such as: ``` { \"access_token\":\"19f6bafd-93fd-4747-b229-00507bbc991f\", \"token_type\":\"bearer\", \"expires_in\":1799, \"scope\":\"...\" } ``` ## API access following authentication Following successful authentication, the value of the access_token field in the response (indicated in green above) should then be presented with all subsequent API calls to allow the Velo platform to validate that the caller is authenticated. This is achieved by setting the HTTP Authorization header with the value set to e.g. Bearer 19f6bafd-93fd-4747-b229-00507bbc991f such as the curl example below: ``` -H \"Authorization: Bearer 19f6bafd-93fd-4747-b229-00507bbc991f \" ``` If you make other Velo API calls which require authorization but the Authorization header is missing or invalid then you will get a **401** HTTP status response. # noqa: E501
The version of the OpenAPI document: 2.26.124
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from velo_payments.api_client import ApiClient
from velo_payments.exceptions import (
ApiTypeError,
ApiValueError
)
class PayeesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
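    # Typical construction (sketch; `Configuration` is the generated
    # configuration class and the token value is a placeholder):
    #
    #     import velo_payments
    #     config = velo_payments.Configuration()
    #     config.access_token = '{access_token}'
    #     api = velo_payments.PayeesApi(velo_payments.ApiClient(config))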
def delete_payee_by_id_v3(self, payee_id, **kwargs): # noqa: E501
"""Delete Payee by Id # noqa: E501
<p>Use v4 instead</p> <p>This API will delete Payee by Id (UUID). Deletion by ID is not allowed if:</p> <p>* Payee ID is not found</p> <p>* If Payee has not been on-boarded</p> <p>* If Payee is in grace period</p> <p>* If Payee has existing payments</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_payee_by_id_v3(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_payee_by_id_v3_with_http_info(payee_id, **kwargs) # noqa: E501
def delete_payee_by_id_v3_with_http_info(self, payee_id, **kwargs): # noqa: E501
"""Delete Payee by Id # noqa: E501
<p>Use v4 instead</p> <p>This API will delete Payee by Id (UUID). Deletion by ID is not allowed if:</p> <p>* Payee ID is not found</p> <p>* If Payee has not been on-boarded</p> <p>* If Payee is in grace period</p> <p>* If Payee has existing payments</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_payee_by_id_v3_with_http_info(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
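        # locals() snapshots the named params plus the raw **kwargs dict,
        # which is validated against all_params and merged in below.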
all_params = ['payee_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_payee_by_id_v3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `delete_payee_by_id_v3`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees/{payeeId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_payee_by_id_v4(self, payee_id, **kwargs): # noqa: E501
"""Delete Payee by Id # noqa: E501
<p>This API will delete Payee by Id (UUID). Deletion by ID is not allowed if:</p> <p>* Payee ID is not found</p> <p>* If Payee has not been on-boarded</p> <p>* If Payee is in grace period</p> <p>* If Payee has existing payments</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_payee_by_id_v4(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_payee_by_id_v4_with_http_info(payee_id, **kwargs) # noqa: E501
def delete_payee_by_id_v4_with_http_info(self, payee_id, **kwargs): # noqa: E501
"""Delete Payee by Id # noqa: E501
<p>This API will delete Payee by Id (UUID). Deletion by ID is not allowed if:</p> <p>* Payee ID is not found</p> <p>* If Payee has not been on-boarded</p> <p>* If Payee is in grace period</p> <p>* If Payee has existing payments</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_payee_by_id_v4_with_http_info(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_payee_by_id_v4" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `delete_payee_by_id_v4`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees/{payeeId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_payee_by_id_v3(self, payee_id, **kwargs): # noqa: E501
"""Get Payee by Id # noqa: E501
<p>Use v4 instead</p> <p>Get Payee by Id</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_payee_by_id_v3(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param bool sensitive: Optional. If omitted or set to false, any Personally Identifiable Information (PII) values are returned masked. If set to true, and you have permission, the PII values will be returned as their original unmasked values.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PayeeDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_payee_by_id_v3_with_http_info(payee_id, **kwargs) # noqa: E501
def get_payee_by_id_v3_with_http_info(self, payee_id, **kwargs): # noqa: E501
"""Get Payee by Id # noqa: E501
<p>Use v4 instead</p> <p>Get Payee by Id</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_payee_by_id_v3_with_http_info(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param bool sensitive: Optional. If omitted or set to false, any Personally Identifiable Information (PII) values are returned masked. If set to true, and you have permission, the PII values will be returned as their original unmasked values.
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PayeeDetailResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'sensitive'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_payee_by_id_v3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `get_payee_by_id_v3`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
if 'sensitive' in local_var_params:
query_params.append(('sensitive', local_var_params['sensitive'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees/{payeeId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PayeeDetailResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_payee_by_id_v4(self, payee_id, **kwargs): # noqa: E501
"""Get Payee by Id # noqa: E501
Get Payee by Id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_payee_by_id_v4(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param bool sensitive: Optional. If omitted or set to false, any Personally Identifiable Information (PII) values are returned masked. If set to true, and you have permission, the PII values will be returned as their original unmasked values.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PayeeDetailResponse2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_payee_by_id_v4_with_http_info(payee_id, **kwargs) # noqa: E501
def get_payee_by_id_v4_with_http_info(self, payee_id, **kwargs): # noqa: E501
"""Get Payee by Id # noqa: E501
Get Payee by Id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_payee_by_id_v4_with_http_info(payee_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
        :param bool sensitive: Optional. If omitted or set to false, any Personally Identifiable Information (PII) values are returned masked. If set to true, and you have permission, the PII values will be returned as their original unmasked values.
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PayeeDetailResponse2, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'sensitive'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_payee_by_id_v4" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `get_payee_by_id_v4`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
if 'sensitive' in local_var_params:
query_params.append(('sensitive', local_var_params['sensitive'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees/{payeeId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PayeeDetailResponse2', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_payee_changes_v3(self, payor_id, updated_since, **kwargs): # noqa: E501
"""List Payee Changes # noqa: E501
<p>Use v4 instead</p> <p>Get a paginated response listing payee changes.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payee_changes_v3(payor_id, updated_since, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The Payor ID to find associated Payees (required)
:param datetime updated_since: The updatedSince filter in the format YYYY-MM-DDThh:mm:ss+hh:mm (required)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 100. Max allowable is 1000.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PayeeDeltaResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_payee_changes_v3_with_http_info(payor_id, updated_since, **kwargs) # noqa: E501
def list_payee_changes_v3_with_http_info(self, payor_id, updated_since, **kwargs): # noqa: E501
"""List Payee Changes # noqa: E501
<p>Use v4 instead</p> <p>Get a paginated response listing payee changes.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payee_changes_v3_with_http_info(payor_id, updated_since, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The Payor ID to find associated Payees (required)
:param datetime updated_since: The updatedSince filter in the format YYYY-MM-DDThh:mm:ss+hh:mm (required)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 100. Max allowable is 1000.
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PayeeDeltaResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payor_id', 'updated_since', 'page', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_payee_changes_v3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payor_id' is set
if ('payor_id' not in local_var_params or
local_var_params['payor_id'] is None):
raise ApiValueError("Missing the required parameter `payor_id` when calling `list_payee_changes_v3`") # noqa: E501
# verify the required parameter 'updated_since' is set
if ('updated_since' not in local_var_params or
local_var_params['updated_since'] is None):
raise ApiValueError("Missing the required parameter `updated_since` when calling `list_payee_changes_v3`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'payor_id' in local_var_params:
query_params.append(('payorId', local_var_params['payor_id'])) # noqa: E501
if 'updated_since' in local_var_params:
query_params.append(('updatedSince', local_var_params['updated_since'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'page_size' in local_var_params:
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees/deltas', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PayeeDeltaResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_payee_changes_v4(self, payor_id, updated_since, **kwargs): # noqa: E501
"""List Payee Changes # noqa: E501
Get a paginated response listing payee changes (updated since a particular time) to a limited set of fields: - dbaName - displayName - email - onboardedStatus - payeeCountry - payeeId - remoteId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payee_changes_v4(payor_id, updated_since, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The Payor ID to find associated Payees (required)
:param datetime updated_since: The updatedSince filter in the format YYYY-MM-DDThh:mm:ss+hh:mm (required)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 100. Max allowable is 1000.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PayeeDeltaResponse2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_payee_changes_v4_with_http_info(payor_id, updated_since, **kwargs) # noqa: E501
def list_payee_changes_v4_with_http_info(self, payor_id, updated_since, **kwargs): # noqa: E501
"""List Payee Changes # noqa: E501
Get a paginated response listing payee changes (updated since a particular time) to a limited set of fields: - dbaName - displayName - email - onboardedStatus - payeeCountry - payeeId - remoteId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payee_changes_v4_with_http_info(payor_id, updated_since, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The Payor ID to find associated Payees (required)
:param datetime updated_since: The updatedSince filter in the format YYYY-MM-DDThh:mm:ss+hh:mm (required)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 100. Max allowable is 1000.
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PayeeDeltaResponse2, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payor_id', 'updated_since', 'page', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_payee_changes_v4" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payor_id' is set
if ('payor_id' not in local_var_params or
local_var_params['payor_id'] is None):
raise ApiValueError("Missing the required parameter `payor_id` when calling `list_payee_changes_v4`") # noqa: E501
# verify the required parameter 'updated_since' is set
if ('updated_since' not in local_var_params or
local_var_params['updated_since'] is None):
raise ApiValueError("Missing the required parameter `updated_since` when calling `list_payee_changes_v4`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'payor_id' in local_var_params:
query_params.append(('payorId', local_var_params['payor_id'])) # noqa: E501
if 'updated_since' in local_var_params:
query_params.append(('updatedSince', local_var_params['updated_since'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'page_size' in local_var_params:
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees/deltas', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PayeeDeltaResponse2', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
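    # Sketch: draining the v4 delta feed page by page (payor id and start
    # time are placeholders; response field names are omitted since they
    # are not shown here):
    #
    #     from datetime import datetime, timezone
    #     since = datetime(2020, 1, 1, tzinfo=timezone.utc)
    #     page = 1
    #     while True:
    #         resp = api.list_payee_changes_v4('{payor_id}', since,
    #                                          page=page, page_size=100)
    #         ...  # process resp; stop once a page comes back empty
    #         page += 1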
def list_payees_v3(self, payor_id, **kwargs): # noqa: E501
"""List Payees # noqa: E501
<p>Use v4 instead</p> Get a paginated response listing the payees for a payor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payees_v3(payor_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The account owner Payor ID (required)
:param WatchlistStatus watchlist_status: The watchlistStatus of the payees.
:param bool disabled: Payee disabled
:param OnboardedStatus onboarded_status: The onboarded status of the payees.
:param str email: Email address
:param str display_name: The display name of the payees.
:param str remote_id: The remote id of the payees.
        :param PayeeType payee_type: The type of the payees.
:param str payee_country: The country of the payee - 2 letter ISO 3166-1 country code (upper case)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 25. Max allowable is 100.
:param str sort: List of sort fields (e.g. ?sort=onboardedStatus:asc,name:asc) Default is name:asc 'name' is treated as company name for companies - last name + ',' + firstName for individuals The supported sort fields are - payeeId, displayName, payoutStatus, onboardedStatus.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PagedPayeeResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_payees_v3_with_http_info(payor_id, **kwargs) # noqa: E501
def list_payees_v3_with_http_info(self, payor_id, **kwargs): # noqa: E501
"""List Payees # noqa: E501
<p>Use v4 instead</p> Get a paginated response listing the payees for a payor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payees_v3_with_http_info(payor_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The account owner Payor ID (required)
:param WatchlistStatus watchlist_status: The watchlistStatus of the payees.
:param bool disabled: Payee disabled
:param OnboardedStatus onboarded_status: The onboarded status of the payees.
:param str email: Email address
:param str display_name: The display name of the payees.
:param str remote_id: The remote id of the payees.
        :param PayeeType payee_type: The type of the payees.
:param str payee_country: The country of the payee - 2 letter ISO 3166-1 country code (upper case)
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 25. Max allowable is 100.
:param str sort: List of sort fields (e.g. ?sort=onboardedStatus:asc,name:asc) Default is name:asc 'name' is treated as company name for companies - last name + ',' + firstName for individuals The supported sort fields are - payeeId, displayName, payoutStatus, onboardedStatus.
        :param _return_http_data_only: response data only, without status code
                                       and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PagedPayeeResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payor_id', 'watchlist_status', 'disabled', 'onboarded_status', 'email', 'display_name', 'remote_id', 'payee_type', 'payee_country', 'page', 'page_size', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_payees_v3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payor_id' is set
if ('payor_id' not in local_var_params or
local_var_params['payor_id'] is None):
raise ApiValueError("Missing the required parameter `payor_id` when calling `list_payees_v3`") # noqa: E501
        if 'sort' in local_var_params and not re.search(r'[a-zA-Z]+:(desc|asc)', local_var_params['sort']):  # noqa: E501
            raise ApiValueError("Invalid value for parameter `sort` when calling `list_payees_v3`, must conform to the pattern `/[a-zA-Z]+:(desc|asc)/`")  # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'payor_id' in local_var_params:
query_params.append(('payorId', local_var_params['payor_id'])) # noqa: E501
if 'watchlist_status' in local_var_params:
query_params.append(('watchlistStatus', local_var_params['watchlist_status'])) # noqa: E501
if 'disabled' in local_var_params:
query_params.append(('disabled', local_var_params['disabled'])) # noqa: E501
if 'onboarded_status' in local_var_params:
query_params.append(('onboardedStatus', local_var_params['onboarded_status'])) # noqa: E501
if 'email' in local_var_params:
query_params.append(('email', local_var_params['email'])) # noqa: E501
if 'display_name' in local_var_params:
query_params.append(('displayName', local_var_params['display_name'])) # noqa: E501
if 'remote_id' in local_var_params:
query_params.append(('remoteId', local_var_params['remote_id'])) # noqa: E501
if 'payee_type' in local_var_params:
query_params.append(('payeeType', local_var_params['payee_type'])) # noqa: E501
if 'payee_country' in local_var_params:
query_params.append(('payeeCountry', local_var_params['payee_country'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'page_size' in local_var_params:
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'sort' in local_var_params:
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PagedPayeeResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
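    # Sketch: a filtered, sorted v3 listing using the documented query
    # parameters (payor id is a placeholder; the status value is an assumed
    # example of an OnboardedStatus):
    #
    #     resp = api.list_payees_v3('{payor_id}',
    #                               onboarded_status='ONBOARDED',
    #                               sort='displayName:asc',
    #                               page_size=100)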
def list_payees_v4(self, payor_id, **kwargs): # noqa: E501
"""List Payees # noqa: E501
Get a paginated response listing the payees for a payor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payees_v4(payor_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The account owner Payor ID (required)
:param WatchlistStatus watchlist_status: The watchlistStatus of the payees.
:param bool disabled: Payee disabled
:param OnboardedStatus onboarded_status: The onboarded status of the payees.
:param str email: Email address
:param str display_name: The display name of the payees.
:param str remote_id: The remote id of the payees.
:param PayeeType payee_type: The onboarded status of the payees.
:param str payee_country: The country of the payee - 2 letter ISO 3166-1 country code (upper case)
:param OfacStatus ofac_status: The ofacStatus of the payees.
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 25. Max allowable is 100.
:param str sort: List of sort fields (e.g. ?sort=onboardedStatus:asc,name:asc) Default is name:asc 'name' is treated as company name for companies - last name + ',' + firstName for individuals The supported sort fields are - payeeId, displayName, payoutStatus, onboardedStatus.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PagedPayeeResponse2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_payees_v4_with_http_info(payor_id, **kwargs) # noqa: E501
def list_payees_v4_with_http_info(self, payor_id, **kwargs): # noqa: E501
"""List Payees # noqa: E501
Get a paginated response listing the payees for a payor. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_payees_v4_with_http_info(payor_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payor_id: The account owner Payor ID (required)
:param WatchlistStatus watchlist_status: The watchlistStatus of the payees.
:param bool disabled: Payee disabled
:param OnboardedStatus onboarded_status: The onboarded status of the payees.
:param str email: Email address
:param str display_name: The display name of the payees.
:param str remote_id: The remote id of the payees.
:param PayeeType payee_type: The onboarded status of the payees.
:param str payee_country: The country of the payee - 2 letter ISO 3166-1 country code (upper case)
:param OfacStatus ofac_status: The ofacStatus of the payees.
:param int page: Page number. Default is 1.
:param int page_size: Page size. Default is 25. Max allowable is 100.
:param str sort: List of sort fields (e.g. ?sort=onboardedStatus:asc,name:asc) Default is name:asc 'name' is treated as company name for companies - last name + ',' + firstName for individuals The supported sort fields are - payeeId, displayName, payoutStatus, onboardedStatus.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PagedPayeeResponse2, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payor_id', 'watchlist_status', 'disabled', 'onboarded_status', 'email', 'display_name', 'remote_id', 'payee_type', 'payee_country', 'ofac_status', 'page', 'page_size', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_payees_v4" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payor_id' is set
if ('payor_id' not in local_var_params or
local_var_params['payor_id'] is None):
raise ApiValueError("Missing the required parameter `payor_id` when calling `list_payees_v4`") # noqa: E501
if 'sort' in local_var_params and not re.search(r'[a-zA-Z]+[:desc|:asc]', local_var_params['sort']): # noqa: E501
raise ApiValueError("Invalid value for parameter `sort` when calling `list_payees_v4`, must conform to the pattern `/[a-zA-Z]+[:desc|:asc]/`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'payor_id' in local_var_params:
query_params.append(('payorId', local_var_params['payor_id'])) # noqa: E501
if 'watchlist_status' in local_var_params:
query_params.append(('watchlistStatus', local_var_params['watchlist_status'])) # noqa: E501
if 'disabled' in local_var_params:
query_params.append(('disabled', local_var_params['disabled'])) # noqa: E501
if 'onboarded_status' in local_var_params:
query_params.append(('onboardedStatus', local_var_params['onboarded_status'])) # noqa: E501
if 'email' in local_var_params:
query_params.append(('email', local_var_params['email'])) # noqa: E501
if 'display_name' in local_var_params:
query_params.append(('displayName', local_var_params['display_name'])) # noqa: E501
if 'remote_id' in local_var_params:
query_params.append(('remoteId', local_var_params['remote_id'])) # noqa: E501
if 'payee_type' in local_var_params:
query_params.append(('payeeType', local_var_params['payee_type'])) # noqa: E501
if 'payee_country' in local_var_params:
query_params.append(('payeeCountry', local_var_params['payee_country'])) # noqa: E501
if 'ofac_status' in local_var_params:
query_params.append(('ofacStatus', local_var_params['ofac_status'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'page_size' in local_var_params:
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'sort' in local_var_params:
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PagedPayeeResponse2', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def payee_details_update_v3(self, payee_id, update_payee_details_request, **kwargs): # noqa: E501
"""Update Payee Details # noqa: E501
<p>Use v4 instead</p> <p>Update payee details for the given Payee Id.<p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.payee_details_update_v3(payee_id, update_payee_details_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdatePayeeDetailsRequest update_payee_details_request: Request to update payee details (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.payee_details_update_v3_with_http_info(payee_id, update_payee_details_request, **kwargs) # noqa: E501
def payee_details_update_v3_with_http_info(self, payee_id, update_payee_details_request, **kwargs): # noqa: E501
"""Update Payee Details # noqa: E501
<p>Use v4 instead</p> <p>Update payee details for the given Payee Id.<p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.payee_details_update_v3_with_http_info(payee_id, update_payee_details_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdatePayeeDetailsRequest update_payee_details_request: Request to update payee details (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'update_payee_details_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method payee_details_update_v3" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `payee_details_update_v3`") # noqa: E501
# verify the required parameter 'update_payee_details_request' is set
if ('update_payee_details_request' not in local_var_params or
local_var_params['update_payee_details_request'] is None):
raise ApiValueError("Missing the required parameter `update_payee_details_request` when calling `payee_details_update_v3`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_payee_details_request' in local_var_params:
body_params = local_var_params['update_payee_details_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees/{payeeId}/payeeDetailsUpdate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def payee_details_update_v4(self, payee_id, update_payee_details_request2, **kwargs): # noqa: E501
"""Update Payee Details # noqa: E501
<p>Update payee details for the given Payee Id.<p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.payee_details_update_v4(payee_id, update_payee_details_request2, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdatePayeeDetailsRequest2 update_payee_details_request2: Request to update payee details (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.payee_details_update_v4_with_http_info(payee_id, update_payee_details_request2, **kwargs) # noqa: E501
def payee_details_update_v4_with_http_info(self, payee_id, update_payee_details_request2, **kwargs): # noqa: E501
"""Update Payee Details # noqa: E501
<p>Update payee details for the given Payee Id.<p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.payee_details_update_v4_with_http_info(payee_id, update_payee_details_request2, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdatePayeeDetailsRequest2 update_payee_details_request2: Request to update payee details (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'update_payee_details_request2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method payee_details_update_v4" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `payee_details_update_v4`") # noqa: E501
# verify the required parameter 'update_payee_details_request2' is set
if ('update_payee_details_request2' not in local_var_params or
local_var_params['update_payee_details_request2'] is None):
raise ApiValueError("Missing the required parameter `update_payee_details_request2` when calling `payee_details_update_v4`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_payee_details_request2' in local_var_params:
body_params = local_var_params['update_payee_details_request2']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees/{payeeId}/payeeDetailsUpdate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v3_payees_payee_id_remote_id_update_post(self, payee_id, update_remote_id_request, **kwargs): # noqa: E501
"""Update Payee Remote Id # noqa: E501
<p>Use v4 instead</p> <p>Update the remote Id for the given Payee Id.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v3_payees_payee_id_remote_id_update_post(payee_id, update_remote_id_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdateRemoteIdRequest update_remote_id_request: Request to update payee remote id v3 (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v3_payees_payee_id_remote_id_update_post_with_http_info(payee_id, update_remote_id_request, **kwargs) # noqa: E501
def v3_payees_payee_id_remote_id_update_post_with_http_info(self, payee_id, update_remote_id_request, **kwargs): # noqa: E501
"""Update Payee Remote Id # noqa: E501
<p>Use v4 instead</p> <p>Update the remote Id for the given Payee Id.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v3_payees_payee_id_remote_id_update_post_with_http_info(payee_id, update_remote_id_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdateRemoteIdRequest update_remote_id_request: Request to update payee remote id v3 (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'update_remote_id_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v3_payees_payee_id_remote_id_update_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `v3_payees_payee_id_remote_id_update_post`") # noqa: E501
# verify the required parameter 'update_remote_id_request' is set
if ('update_remote_id_request' not in local_var_params or
local_var_params['update_remote_id_request'] is None):
raise ApiValueError("Missing the required parameter `update_remote_id_request` when calling `v3_payees_payee_id_remote_id_update_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_remote_id_request' in local_var_params:
body_params = local_var_params['update_remote_id_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v3/payees/{payeeId}/remoteIdUpdate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def v4_payees_payee_id_remote_id_update_post(self, payee_id, update_remote_id_request2, **kwargs): # noqa: E501
"""Update Payee Remote Id # noqa: E501
<p>Update the remote Id for the given Payee Id.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v4_payees_payee_id_remote_id_update_post(payee_id, update_remote_id_request2, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdateRemoteIdRequest2 update_remote_id_request2: Request to update payee remote id v4 (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.v4_payees_payee_id_remote_id_update_post_with_http_info(payee_id, update_remote_id_request2, **kwargs) # noqa: E501
def v4_payees_payee_id_remote_id_update_post_with_http_info(self, payee_id, update_remote_id_request2, **kwargs): # noqa: E501
"""Update Payee Remote Id # noqa: E501
<p>Update the remote Id for the given Payee Id.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v4_payees_payee_id_remote_id_update_post_with_http_info(payee_id, update_remote_id_request2, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str payee_id: The UUID of the payee. (required)
:param UpdateRemoteIdRequest2 update_remote_id_request2: Request to update payee remote id v4 (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['payee_id', 'update_remote_id_request2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method v4_payees_payee_id_remote_id_update_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'payee_id' is set
if ('payee_id' not in local_var_params or
local_var_params['payee_id'] is None):
raise ApiValueError("Missing the required parameter `payee_id` when calling `v4_payees_payee_id_remote_id_update_post`") # noqa: E501
# verify the required parameter 'update_remote_id_request2' is set
if ('update_remote_id_request2' not in local_var_params or
local_var_params['update_remote_id_request2'] is None):
raise ApiValueError("Missing the required parameter `update_remote_id_request2` when calling `v4_payees_payee_id_remote_id_update_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'payee_id' in local_var_params:
path_params['payeeId'] = local_var_params['payee_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_remote_id_request2' in local_var_params:
body_params = local_var_params['update_remote_id_request2']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/v4/payees/{payeeId}/remoteIdUpdate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
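# Editor's note: a minimal usage sketch for the endpoints above; not part of
# the generated client. The bootstrap names (velopayments.Configuration,
# velopayments.ApiClient, velopayments.PayeesApi) follow the usual
# OpenAPI-generator package layout and are assumptions here, as are the
# token and payor UUID placeholders.
if __name__ == '__main__':
    import velopayments

    configuration = velopayments.Configuration()
    configuration.access_token = 'YOUR_OAUTH2_TOKEN'  # placeholder
    api = velopayments.PayeesApi(velopayments.ApiClient(configuration))
    payor_id = '00000000-0000-0000-0000-000000000000'  # placeholder UUID

    # Synchronous call: snake_case kwargs become the camelCase query params
    # built above (payorId, pageSize, ...).
    page = api.list_payees_v4(payor_id, page=1, page_size=25,
                              sort='displayName:asc')

    # Asynchronous call: async_req=True returns a thread-like handle and
    # .get() blocks until the PagedPayeeResponse2 arrives.
    thread = api.list_payees_v4(payor_id, async_req=True)
    page = thread.get()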
| 54.351656 | 4,651 | 0.636217 | 10,120 | 82,071 | 4.917292 | 0.049308 | 0.040834 | 0.064707 | 0.021542 | 0.916685 | 0.913189 | 0.909511 | 0.905854 | 0.903503 | 0.899604 | 0 | 0.019416 | 0.287739 | 82,071 | 1,509 | 4,652 | 54.387674 | 0.831856 | 0.505708 | 0 | 0.80292 | 0 | 0.00292 | 0.216657 | 0.063778 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036496 | false | 0 | 0.007299 | 0 | 0.080292 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
076c044c9a22bd30d8c18bb2663e08999df8b013 | 158 | py | Python | rapidpro_webhooks/apps/places/__init__.py | unicef/rapidpro-webhooks | 773004eaae3a91ec6ac4653318bf8eff1c4b642d | ["MIT"] | null | null | null | rapidpro_webhooks/apps/places/__init__.py | unicef/rapidpro-webhooks | 773004eaae3a91ec6ac4653318bf8eff1c4b642d | ["MIT"] | null | null | null | rapidpro_webhooks/apps/places/__init__.py | unicef/rapidpro-webhooks | 773004eaae3a91ec6ac4653318bf8eff1c4b642d | ["MIT"] | null | null | null |
from rapidpro_webhooks.apps.places.views import * # noqa
from .attributes import * # noqa
from .entities import * # noqa
from .reconcile import * # noqa
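# Editor's note: the star-imports above re-export each submodule's public
# names at package level; the `# noqa` markers silence flake8's F401/F403.
# A more explicit equivalent is sketched below as comments only, since the
# submodules' contents are not shown and the names are hypothetical:
#
#     from .attributes import AttributeView  # hypothetical name
#     from .entities import EntityView       # hypothetical name
#
#     __all__ = ['AttributeView', 'EntityView']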
| 26.333333 | 57 | 0.727848 | 20 | 158 | 5.7 | 0.55 | 0.350877 | 0.368421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.183544 | 158 | 5 | 58 | 31.6 | 0.883721 | 0.120253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0789ddcc71c0a8aed35c00e7f8d72a7bb072e73c | 134 | py | Python | intervul/datFiles/general_data/properties/__init__.py | mpacheco62/intervul | c0eaadf54580de4b3c2dea46e8f196eab52280e1 | ["MIT"] | 1 | 2021-04-13T13:28:16.000Z | 2021-04-13T13:28:16.000Z | intervul/datFiles/general_data/properties/__init__.py | andresutrera/intervul | 75c5f824067549b3ddcbe9fe667964fb85a05ce3 | ["MIT"] | null | null | null | intervul/datFiles/general_data/properties/__init__.py | andresutrera/intervul | 75c5f824067549b3ddcbe9fe667964fb85a05ce3 | ["MIT"] | 1 | 2021-05-06T20:29:42.000Z | 2021-05-06T20:29:42.000Z |
from ._materials import Materials
from . import materials
from ._material_system_of_coordinates import Material_system_of_coordinates
| 26.8 | 74 | 0.88806 | 17 | 134 | 6.529412 | 0.411765 | 0.27027 | 0.342342 | 0.486486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089552 | 134 | 4 | 75 | 33.5 | 0.909836 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
07b25384128fdc00550ca5d3d615b5032975c720 | 89 | py | Python | fastiqa/dev.py | baidut/PatchVQ | 040486b6342dfd36695f1daea0b5c4d77d728a23 | ["Unlicense"] | 32 | 2020-12-05T09:11:20.000Z | 2022-03-28T07:49:13.000Z | fastiqa/dev.py | utlive/PatchVQ | 040486b6342dfd36695f1daea0b5c4d77d728a23 | ["Unlicense"] | 5 | 2021-07-12T19:43:51.000Z | 2022-01-28T13:16:16.000Z | fastiqa/dev.py | utlive/PatchVQ | 040486b6342dfd36695f1daea0b5c4d77d728a23 | ["Unlicense"] | 7 | 2020-12-29T21:52:07.000Z | 2022-03-18T15:12:50.000Z |
from fastiqa_dev.models.rnn_head import *
from fastiqa_dev.models.seq_body_head import *
| 29.666667 | 46 | 0.842697 | 15 | 89 | 4.666667 | 0.6 | 0.314286 | 0.4 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089888 | 89 | 2 | 47 | 44.5 | 0.864198 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
07c76419b887953a3ce261d6d7c163eb8140e8bc | 2,273 | py | Python | src/utility.py | Jin0331/deeptrio | cd8be1bfcf669456a50a0f82030abe10ef3da1a4 | ["MIT"] | null | null | null | src/utility.py | Jin0331/deeptrio | cd8be1bfcf669456a50a0f82030abe10ef3da1a4 | ["MIT"] | null | null | null | src/utility.py | Jin0331/deeptrio | cd8be1bfcf669456a50a0f82030abe10ef3da1a4 | ["MIT"] | null | null | null |
import numpy as np
from sklearn.model_selection import KFold, ShuffleSplit


def random_arr(arr):
    # Two fixed-seed in-place shuffles: deterministic, so calling this on
    # several parallel arrays leaves their rows aligned with one another.
    np.random.seed(5)
    np.random.shuffle(arr)
    np.random.seed(55)
    np.random.shuffle(arr)


def array_split(num, arr_ppi_1, arr_ppi_2, arr_ppi_y, arr_single_1, arr_single_2, arr_single_y, fix_text):
    # Per-fold containers: *_a_* hold training splits, *_t_* / y_t_s the
    # held-out splits, h_a_t / h_r_t the matching fix_text rows.
    x_a_1 = {}
    x_t_1 = {}
    x_a_2 = {}
    x_t_2 = {}
    y_a = {}
    y_t_s = {}
    h_a_t = {}
    h_r_t = {}
    kfnum = 0
    spl_num = num
    kf = KFold(n_splits=spl_num, shuffle=True, random_state=5)
    for train_index, test_index in kf.split(arr_ppi_y):
        x_a_1[kfnum], x_t_1[kfnum] = arr_ppi_1[train_index], arr_ppi_1[test_index]
        x_a_2[kfnum], x_t_2[kfnum] = arr_ppi_2[train_index], arr_ppi_2[test_index]
        y_a[kfnum], y_t_s[kfnum] = arr_ppi_y[train_index], arr_ppi_y[test_index]
        h_a_t[kfnum], h_r_t[kfnum] = fix_text[train_index], fix_text[test_index]
        kfnum += 1
    for ks in range(spl_num):
        # Prepend the single-protein data to each training fold, then shuffle.
        x_a_1[ks] = np.concatenate([arr_single_1, x_a_1[ks]], 0)
        x_a_2[ks] = np.concatenate([arr_single_2, x_a_2[ks]], 0)
        y_a[ks] = np.concatenate([arr_single_y, y_a[ks]], 0)
        random_arr(x_a_1[ks])
        random_arr(x_a_2[ks])
        random_arr(y_a[ks])
        random_arr(h_a_t[ks])
    return x_a_1, x_a_2, y_a, x_t_1, x_t_2, y_t_s, h_a_t, h_r_t


def array_split_prose(num, arr_ppi_1, arr_ppi_2, arr_ppi_y, fix_text):
    # Same folding as array_split, but without the extra single-protein data.
    x_a_1 = {}
    x_t_1 = {}
    x_a_2 = {}
    x_t_2 = {}
    y_a = {}
    y_t_s = {}
    h_a_t = {}
    h_r_t = {}
    kfnum = 0
    spl_num = num
    kf = KFold(n_splits=spl_num, shuffle=True, random_state=5)
    for train_index, test_index in kf.split(arr_ppi_y):
        x_a_1[kfnum], x_t_1[kfnum] = arr_ppi_1[train_index], arr_ppi_1[test_index]
        x_a_2[kfnum], x_t_2[kfnum] = arr_ppi_2[train_index], arr_ppi_2[test_index]
        y_a[kfnum], y_t_s[kfnum] = arr_ppi_y[train_index], arr_ppi_y[test_index]
        h_a_t[kfnum], h_r_t[kfnum] = fix_text[train_index], fix_text[test_index]
        kfnum += 1
    for ks in range(spl_num):
        random_arr(x_a_1[ks])
        random_arr(x_a_2[ks])
        random_arr(y_a[ks])
        random_arr(h_a_t[ks])
    return x_a_1, x_a_2, y_a, x_t_1, x_t_2, y_t_s, h_a_t, h_r_t
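# Editor's note: a small driving sketch for the helpers above; not part of
# the original module. Shapes and the fix_text payload are invented for
# illustration.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    ppi_1 = rng.rand(100, 8)             # paired-protein features, side 1
    ppi_2 = rng.rand(100, 8)             # paired-protein features, side 2
    ppi_y = rng.randint(0, 2, size=100)  # binary labels
    fix_text = np.array(['pair_%d' % i for i in range(100)])

    # Two folds: x_a_*/y_a hold the shuffled training split of each fold,
    # x_t_*/y_t_s the matching held-out split, h_*_t the fix_text rows.
    x_a_1, x_a_2, y_a, x_t_1, x_t_2, y_t_s, h_a_t, h_r_t = \
        array_split_prose(2, ppi_1, ppi_2, ppi_y, fix_text)
    print(x_a_1[0].shape, x_t_1[0].shape)  # (50, 8) (50, 8)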
| 28.4125 | 106 | 0.626045 | 459 | 2,273 | 2.631808 | 0.111111 | 0.099338 | 0.024834 | 0.07947 | 0.81043 | 0.750828 | 0.750828 | 0.750828 | 0.750828 | 0.750828 | 0 | 0.034904 | 0.243731 | 2,273 | 79 | 107 | 28.772152 | 0.66783 | 0 | 0 | 0.827586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051724 | false | 0 | 0.034483 | 0 | 0.12069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6afdfe6ade7aa02aa895eb8ee8d175e11d4c88c3 | 6,316 | py | Python | skidl/libs/motors_sklib.py | arjenroodselaar/skidl | 0bf801bd3b74e6ef94bd9aa1b68eef756b568276 | ["MIT"] | 700 | 2016-08-16T21:12:50.000Z | 2021-10-10T02:15:18.000Z | skidl/libs/motors_sklib.py | 0dvictor/skidl | 458709a10b28a864d25ae2c2b44c6103d4ddb291 | ["MIT"] | 118 | 2016-08-16T20:51:05.000Z | 2021-10-10T08:07:18.000Z | skidl/libs/motors_sklib.py | 0dvictor/skidl | 458709a10b28a864d25ae2c2b44c6103d4ddb291 | ["MIT"] | 94 | 2016-08-25T14:02:28.000Z | 2021-09-12T05:17:08.000Z |
from skidl import SKIDL, TEMPLATE, Part, Pin, SchLib

SKIDL_lib_version = '0.0.1'

motors = SchLib(tool=SKIDL).add_parts(*[
        Part(name='Fan',dest=TEMPLATE,tool=SKIDL,keywords='Fan Motor',description='Fan',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Fan_ALT',dest=TEMPLATE,tool=SKIDL,keywords='Fan Motor',description='Fan without PWM or tach, alternative symbol',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Fan_IEC60617',dest=TEMPLATE,tool=SKIDL,keywords='Fan Motor IEC-60617',description='Fan (according to IEC-60617)',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Fan_Tacho',dest=TEMPLATE,tool=SKIDL,keywords='Fan Motor tacho',description='Fan, tacho output, 3-pin connector',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Fan_Pin_Header_Straight_1x03', 'Pin_Headers:Pin_Header_Straight_1x03', 'TerminalBlock*3pol', 'bornier3'],do_erc=True,aliases=['Fan_3pin', 'Fan_PC_Chassis'],pins=[
            Pin(num='1',name='Tacho',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Fan_Tacho_PWM',dest=TEMPLATE,tool=SKIDL,keywords='Fan Motor tacho PWM',description='Fan, tacho output, PWM input, 4-pin connector',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Fan_Pin_Header_Straight_1x04', 'Pin_Headers:Pin_Header_Straight_1x04', 'TerminalBlock*4pol', 'bornier4'],do_erc=True,aliases=['Fan_CPU_4pin', 'Fan_4pin'],pins=[
            Pin(num='1',name='-',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='Tacho',func=Pin.PASSIVE,do_erc=True),
            Pin(num='4',name='PWM',do_erc=True)]),
        Part(name='Motor_AC',dest=TEMPLATE,tool=SKIDL,keywords='AC Motor',description='AC Motor',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='~',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Motor_DC',dest=TEMPLATE,tool=SKIDL,keywords='DC Motor',description='DC Motor',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Motor_DC_ALT',dest=TEMPLATE,tool=SKIDL,keywords='DC Motor',description='DC Motor, alternative symbol',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x02', 'Connect:bornier2', 'TerminalBlock*2pol'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Motor_Servo',dest=TEMPLATE,tool=SKIDL,keywords='Servo Motor',description='Servo Motor (Robbe connector)',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x03'],do_erc=True,aliases=['Motor_Servo_JR', 'Motor_Servo_Hitec', 'Motor_Servo_Futaba_J', 'Motor_Servo_Robbe', 'Motor_Servo_Grapner_JR'],pins=[
            Pin(num='1',name='PWM',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='-',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Motor_Servo_AirTronics',dest=TEMPLATE,tool=SKIDL,keywords='Servo Motor',description='Servo Motor (AirTronics connector)',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x03'],do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='PWM',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Stepper_Motor_bipolar',dest=TEMPLATE,tool=SKIDL,keywords='bipolar stepper motor',description='4-wire bipolar stepper motor',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x04', 'Connect:bornier4', 'TerminalBlock*4pol'],do_erc=True,pins=[
            Pin(num='1',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='4',name='~',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Stepper_Motor_unipolar_5pin',dest=TEMPLATE,tool=SKIDL,keywords='unipolar stepper motor',description='5-wire unipolar stepper motor',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x05', 'Connect:bornier5', 'TerminalBlock*5pol'],do_erc=True,pins=[
            Pin(num='1',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='-',func=Pin.PASSIVE,do_erc=True),
            Pin(num='4',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='5',name='~',func=Pin.PASSIVE,do_erc=True)]),
        Part(name='Stepper_Motor_unipolar_6pin',dest=TEMPLATE,tool=SKIDL,keywords='unipolar stepper motor',description='6-wire unipolar stepper motor',ref_prefix='M',num_units=1,fplist=['Pin_Headers:Pin_Header_Straight_1x06', 'Connect:bornier6', 'TerminalBlock*6pol'],do_erc=True,pins=[
            Pin(num='1',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='3',name='-',func=Pin.PASSIVE,do_erc=True),
            Pin(num='4',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='5',name='~',func=Pin.PASSIVE,do_erc=True),
            Pin(num='6',name='~',func=Pin.PASSIVE,do_erc=True)])])
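# Editor's note: the file is purely declarative; every entry is a Part
# template plus its Pin list, and add_parts() returns the library it was
# called on (that is how `motors` is assigned above). Extending the pattern,
# with a part invented purely for illustration:
extra_motors = SchLib(tool=SKIDL).add_parts(
        Part(name='Motor_Vibration',dest=TEMPLATE,tool=SKIDL,keywords='vibration motor',description='Vibration motor (illustrative entry, not in the upstream library)',ref_prefix='M',num_units=1,do_erc=True,pins=[
            Pin(num='1',name='+',func=Pin.PASSIVE,do_erc=True),
            Pin(num='2',name='-',func=Pin.PASSIVE,do_erc=True)]))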
| 107.050847 | 358 | 0.681761 | 958 | 6,316 | 4.304802 | 0.100209 | 0.064258 | 0.115664 | 0.151309 | 0.850388 | 0.821775 | 0.798982 | 0.783463 | 0.760669 | 0.705383 | 0 | 0.026552 | 0.117479 | 6,316 | 58 | 359 | 108.896552 | 0.713312 | 0 | 0 | 0.5 | 0 | 0 | 0.294174 | 0.105605 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.696429 | 0.017857 | 0 | 0.017857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
ed0699de98ad5c37f9861d47d24b9001ba4c5689 | 3,784 | py | Python | trader/sample_library_tasks.py | ptro-development/trader | 084a5c58351e8d7fb55d772ac030a91af940e4c6 | ["MIT"] | null | null | null | trader/sample_library_tasks.py | ptro-development/trader | 084a5c58351e8d7fb55d772ac030a91af940e4c6 | ["MIT"] | null | null | null | trader/sample_library_tasks.py | ptro-development/trader | 084a5c58351e8d7fb55d772ac030a91af940e4c6 | ["MIT"] | null | null | null |
from __future__ import absolute_import

from trader.celery import app
from scipy.stats.stats import pearsonr


@app.task
def close_index(index, indexes, window_size=10):
    not_allowed = [index - i for i in range(1, window_size)]
    not_allowed.extend([index + i for i in range(1, window_size)])
    found = False
    for na in not_allowed:
        if na in indexes:
            found = True
            break
    return found


@app.task
def find_sample_correlations(
        data, samples, sample_size, acceptable_correlation):
    for d_index in range(0, len(data) - sample_size):
        for s_index, sample in enumerate(samples):
            # to avoid correlation to itself by not testing
            # in window of size 2 * sample_size with
            # sample["sample_position"] in the middle
            if d_index < sample["sample_position"] - sample_size or d_index > sample["sample_position"] + sample_size:  # noqa
                cor, other = pearsonr(
                    sample["sample_data"],
                    data[d_index: d_index + sample_size])
                if cor > acceptable_correlation:
                    # to avoid multiple close correlation matches
                    if not close_index(d_index, samples[s_index]["+correlation_positions"]):  # noqa
                        samples[s_index]["+correlation_positions"].append(
                            d_index)
                elif cor < -acceptable_correlation:
                    # to avoid multiple close correlation matches
                    if not close_index(d_index, samples[s_index]["-correlation_positions"]):  # noqa
                        samples[s_index]["-correlation_positions"].append(
                            d_index)
    return samples


@app.task
def find_first_sample_correlations(
        data, samples, sample_size, acceptable_correlation):
    for d_index in range(0, len(data) - sample_size):
        for s_index, sample in enumerate(samples):
            # to avoid correlation to itself by not testing
            # in window of size 2 * sample_size with
            # sample["sample_position"] in the middle
            if d_index < sample["sample_position"] - sample_size or d_index > sample["sample_position"] + sample_size:  # noqa
                cor, other = pearsonr(
                    sample["sample_data"],
                    data[d_index: d_index + sample_size])
                if cor > acceptable_correlation:
                    # to avoid multiple close correlation matches
                    if not close_index(d_index, samples[s_index]["+correlation_positions"]):  # noqa
                        samples[s_index]["+correlation_positions"].append(
                            d_index)
                        break
                elif cor < -acceptable_correlation:
                    # to avoid multiple close correlation matches
                    if not close_index(d_index, samples[s_index]["-correlation_positions"]):  # noqa
                        samples[s_index]["-correlation_positions"].append(
                            d_index)
                        break
    return samples


@app.task
def find_sample_correlations_no_limits(
        data, samples, sample_size, acceptable_correlation):
    for d_index in range(0, len(data) - sample_size):
        for s_index, sample in enumerate(samples):
            cor, other = pearsonr(
                sample["sample_data"],
                data[d_index: d_index + sample_size])
            if cor > acceptable_correlation:
                samples[s_index]["+correlation_positions"].append(
                    d_index)
            elif cor < -acceptable_correlation:
                samples[s_index]["-correlation_positions"].append(
                    d_index)
    return samples
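# Editor's note: because @app.task leaves the functions directly callable,
# the correlation search can be exercised without a Celery worker. A sketch
# on synthetic data; the samples layout mirrors the code above, the signal
# itself is invented.
if __name__ == '__main__':
    import numpy as np

    # A repeating noisy sine wave: a window of it should correlate strongly
    # with later windows at the same phase.
    data = np.sin(np.linspace(0, 8 * np.pi, 400)) + 0.05 * np.random.randn(400)
    sample_size = 40
    samples = [{
        "sample_position": 0,
        "sample_data": data[0:sample_size],
        "+correlation_positions": [],
        "-correlation_positions": [],
    }]
    out = find_sample_correlations_no_limits(data, samples, sample_size, 0.9)
    print(out[0]["+correlation_positions"])  # indexes of matching windows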
| 44.517647 | 126 | 0.585095 | 424 | 3,784 | 4.976415 | 0.160377 | 0.065403 | 0.061611 | 0.113744 | 0.888152 | 0.888152 | 0.853555 | 0.853555 | 0.853555 | 0.827014 | 0 | 0.003583 | 0.336152 | 3,784 | 84 | 127 | 45.047619 | 0.836385 | 0.120243 | 0 | 0.787879 | 0 | 0 | 0.094505 | 0.066425 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060606 | false | 0 | 0.045455 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed1484c2f3468493cfcddf8407f992d2ac2023af | 170 | py | Python | home/admin.py | subhangi2731/pixelvibe | 6147e099fd53ef7f1f9b5f96aa0afb514233af71 | ["MIT"] | null | null | null | home/admin.py | subhangi2731/pixelvibe | 6147e099fd53ef7f1f9b5f96aa0afb514233af71 | ["MIT"] | null | null | null | home/admin.py | subhangi2731/pixelvibe | 6147e099fd53ef7f1f9b5f96aa0afb514233af71 | ["MIT"] | null | null | null |
from django.contrib import admin

from home.models import Contact, Gallery

# Register your models here.
admin.site.register((Contact, Gallery))
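# Editor's note: admin.site.register() accepts an iterable of models, which
# is why the single tuple above registers both. If per-model options are
# needed later, the decorator form is the usual next step; it is left
# commented out here because a model cannot be registered twice, and the
# list_display fields are assumptions since the models are not shown.
#
# @admin.register(Contact)
# class ContactAdmin(admin.ModelAdmin):
#     list_display = ('id',)
#
# @admin.register(Gallery)
# class GalleryAdmin(admin.ModelAdmin):
#     list_display = ('id',)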
| 28.333333 | 39 | 0.794118 | 24 | 170 | 5.625 | 0.541667 | 0.118519 | 0.207407 | 0.296296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 170 | 6 | 40 | 28.333333 | 0.9 | 0.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ed71f730077e70b960cc9c71728084d95536256c | 27,208 | py | Python | data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report5_row_suppression_icd.py | lrwb-aou/curation | e80447e56d269dc2c9c8bc79e78218d4b0dc504c | ["MIT"] | 16 | 2017-06-30T20:05:05.000Z | 2022-03-08T21:03:19.000Z | data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report5_row_suppression_icd.py | lrwb-aou/curation | e80447e56d269dc2c9c8bc79e78218d4b0dc504c | ["MIT"] | 342 | 2017-06-23T21:37:40.000Z | 2022-03-30T16:44:16.000Z | data_steward/analytics/rt_cdr_qc/cdr_deid_qa_report5_row_suppression_icd.py | lrwb-aou/curation | e80447e56d269dc2c9c8bc79e78218d4b0dc504c | ["MIT"] | 33 | 2017-07-01T00:12:20.000Z | 2022-01-26T18:06:53.000Z |
# ---
# jupyter:
#   jupytext:
#     text_representation:
#       extension: .py
#       format_name: light
#       format_version: '1.5'
#     jupytext_version: 1.7.1
#   kernelspec:
#     display_name: Python 3
#     language: python
#     name: python3
# ---
# # QA queries on new CDR_deid Row Suppression - ICD10/ICD9 Snomed
#
# See [DC-852] and [DC-732] for more details
import urllib
import pandas as pd
pd.options.display.max_rows = 120
# + tags=["parameters"]
project_id = ""
deid_cdr = ""
# -
# df will hold a summary of all the checks at the end
df = pd.DataFrame(columns=['query', 'result'])
# # 1 PRC_1 Verify all ICD9(764 -779)/ICD10(P) concept_codes used to specify other conditions originating In the perinatal period (including birth trauma),are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE
(vocabulary_id='ICD9CM' AND
(concept_code LIKE '764%' OR concept_code LIKE '765%' OR concept_code LIKE '766%' OR
concept_code LIKE '767%' OR concept_code LIKE '768%' OR concept_code LIKE '769%' OR concept_code LIKE '770%' OR
concept_code LIKE '771%' OR concept_code LIKE '772%' OR concept_code LIKE '773%' OR concept_code LIKE '774%' OR
concept_code LIKE '775%' OR concept_code LIKE '776%' OR concept_code LIKE '777%' OR concept_code LIKE '778%' OR
concept_code LIKE '779%'))
OR (vocabulary_id='ICD10CM' AND
concept_code LIKE 'P%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query1 ICD9(764 -779)/ICD10(P) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query1 ICD9(764 -779)/ICD10(P) in condition', 'result': ''},
                   ignore_index=True)
df1
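# Editor's note (not in the original notebook): every check below repeats the
# same read_gbq / append-PASS bookkeeping. A small helper in the notebook's
# own idiom could factor that out; sketched here and left unused so the
# original cells stay as written.
def run_check(df_summary, check_query, label):
    result = pd.read_gbq(check_query, dialect='standard')
    status = 'PASS' if result.loc[0].sum() == 0 else ''
    return df_summary.append({'query': label, 'result': status},
                             ignore_index=True)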
# # 2 PRC_2 Verify all ICD9(764 -779)/ICD10(P) concept_codes used to specify other conditions originating In the perinatal period (including birth trauma),are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE
(vocabulary_id='ICD9CM' AND
(concept_code LIKE '765%' OR concept_code LIKE '766%' OR
concept_code LIKE '767%' OR concept_code LIKE '768%' OR concept_code LIKE '769%' OR concept_code LIKE '770%' OR
concept_code LIKE '771%' OR concept_code LIKE '772%' OR concept_code LIKE '773%' OR concept_code LIKE '774%' OR
concept_code LIKE '775%' OR concept_code LIKE '776%' OR concept_code LIKE '777%' OR concept_code LIKE '778%' OR
concept_code LIKE '779%'))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'P%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query2 ICD9(764 -779)/ICD10(P)', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query2 ICD9(764 -779)/ICD10(P)', 'result': ''},
                   ignore_index=True)
df1
# # 3 PRC_3 Verify all CD9(V3)/ICD10(Z38) concept_codes used to specify Liveborn infants according to type of birth are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE
(vocabulary_id='ICD9CM' AND (concept_code LIKE 'V3%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Z38%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query3 CD9(V3)/ICD10(Z38) in obs', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query3 CD9(V3)/ICD10(Z38) in obs', 'result': ''},
                   ignore_index=True)
df1
# # 4 PRC_4 Verify all CD9(V3)/ICD10(Z38) concept_codes used to specify Liveborn infants according to type of birth are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE
(vocabulary_id='ICD9CM' AND (concept_code LIKE 'V3%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Z38%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query4 CD9(V3)/ICD10(Z38) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query4 CD9(V3)/ICD10(Z38) in condition', 'result': ''},
                   ignore_index=True)
df1
# # 5 PRC_5 Verify all ICD9(798)/ICD10(R99) concept_codes used to specify Unknown cause of death are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE
(vocabulary_id='ICD9CM' AND (concept_code LIKE '798%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'R99%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query5 ICD9(798)/ICD10(R99) in obs', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query5 ICD9(798)/ICD10(R99) in obs', 'result': ''},
                   ignore_index=True)
df1
# # 6 PRC_6 Verify all ICD9(799)/ICD10(R99) concept_codes used to specify Unknown cause of death are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
#
# <font color='red'> Question: ICD9(798) appears in the title, but the note says 799; ICD10(R99) is in the title but not in the note.
#
# After testing against the new CDR, it should be ICD9 798, not 799; the original title was wrong.
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE '798%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'R99%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query6 ICD9(799)/ICD10(R99) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query6 ICD9(799)/ICD10(R99) in condition', 'result': ''},
                   ignore_index=True)
df1
# # 7 PRC_7 Verify all ICD10(Y36) codes used to specify Injury due to war operations are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM
`{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E99%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y36%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query7 ICD10(Y36) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query7 ICD10(Y36) in condition', 'result': ''},
                   ignore_index=True)
df1
# # 8 PRC_8 Verify all ICD10(Y36) codes used to specify Injury due to war operations are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND ( concept_code LIKE 'E100%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y36%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query8 ICD10(Y36) in obs', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query8 ICD10(Y36) in obs', 'result': ''},
                   ignore_index=True)
df1
# # 9 PRC_9 Verify all ICD10(Y37) codes used to specify Military operations are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y37%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query9 ICD10(Y37) in observation', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query9 ICD10(Y37) in observation', 'result': ''},
                   ignore_index=True)
df1
# # 10 PRC_10 Verify all ICD10(Y37) codes used to specify Military operations are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y37%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query10 ICD10(Y37) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query10 ICD10(Y37) in condition', 'result': ''},
                   ignore_index=True)
df1
# # 11 PRC_11 Verify all ICD10(Y35) codes used to specify Legal intervention are not generated/displayed as condition_source_value in the CONDITION_OCCURENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND ( concept_code LIKE 'E97%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y35%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1 = pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum() == 0:
    df = df.append({'query': 'Query11 ICD10(Y35) in condition', 'result': 'PASS'},
                   ignore_index=True)
else:
    df = df.append({'query': 'Query11 ICD10(Y35) in condition', 'result': ''},
                   ignore_index=True)
df1
# # 12 PRC_12 Verify all ICD10(Y38)/ICD9CM(E979) codes used to specify Terrorism are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND ( concept_code LIKE 'E979%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y38%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query12 ICD10(Y38)/ICD9CM(E979) in obs', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query12 ICD10(Y38)/ICD9CM(E979) in obs', 'result' : ''},
ignore_index = True)
df1
# # 13 PRC_13 Verify all ICD10(Y38)/ICD9CM(E979) codes used to specify Terrorism are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND ( concept_code LIKE 'E979%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'Y38%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query13 ICD10(Y38)/ICD9CM(E979) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query13 ICD10(Y38)/ICD9CM(E979) in condition', 'result' : ''},
ignore_index = True)
df1
# # 14 PRC_14 Verify all ICD9(E96)/ICD10(X92-Y09) codes used to specify Assault/Homicide are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E96%' ))
OR (vocabulary_id='ICD10CM' AND (concept_code LIKE 'X92%' OR
concept_code LIKE 'X93%' OR concept_code LIKE 'X94%' OR concept_code LIKE 'X95%' OR
concept_code LIKE 'X96%' OR concept_code LIKE 'X97%' OR concept_code LIKE 'X98%' OR
concept_code LIKE 'X99%' OR concept_code LIKE 'Y00%' OR concept_code LIKE 'Y01%' OR
concept_code LIKE 'Y02%' OR concept_code LIKE 'Y03%' OR concept_code LIKE 'Y04%' OR
concept_code LIKE 'Y05%' OR concept_code LIKE 'Y06%' OR concept_code LIKE 'Y07%' OR
concept_code LIKE 'Y08%' OR concept_code LIKE 'Y09%'))
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query14 ICD9(E96)/ICD10(X92-Y09) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query14 ICD9(E96)/ICD10(X92-Y09) in condition', 'result' : ''},
ignore_index = True)
df1
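# Note: the assault/homicide checks (Query14/15) and the drowning checks further
# below repeat long OR-chains of LIKE patterns. As a hedged, optional sketch --
# not part of the original notebook -- the chain can be generated from a list of
# code prefixes, which is easier to audit for gaps:
icd10_assault_prefixes = ['X92', 'X93', 'X94', 'X95', 'X96', 'X97', 'X98', 'X99',
                          'Y00', 'Y01', 'Y02', 'Y03', 'Y04', 'Y05', 'Y06', 'Y07',
                          'Y08', 'Y09']
icd10_like_chain = ' OR '.join(f"concept_code LIKE '{p}%'" for p in icd10_assault_prefixes)
# icd10_like_chain can then be interpolated into the WITH clause, e.g.:
# WHERE (vocabulary_id='ICD9CM' AND concept_code LIKE 'E96%')
#    OR (vocabulary_id='ICD10CM' AND ({icd10_like_chain}))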
# # 15 PRC_15 Verify all ICD9(E96)/ICD10(X92-Y09) codes used to specify Assault/Homicide are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E96%' ))
OR (vocabulary_id='ICD10CM' AND (concept_code LIKE 'X92%' OR
concept_code LIKE 'X93%' OR concept_code LIKE 'X94%' OR concept_code LIKE 'X95%' OR
concept_code LIKE 'X96%' OR concept_code LIKE 'X97%' OR concept_code LIKE 'X98%' OR
concept_code LIKE 'X99%' OR concept_code LIKE 'Y00%' OR concept_code LIKE 'Y01%' OR
concept_code LIKE 'Y02%' OR concept_code LIKE 'Y03%' OR concept_code LIKE 'Y04%' OR
concept_code LIKE 'Y05%' OR concept_code LIKE 'Y06%' OR concept_code LIKE 'Y07%' OR
concept_code LIKE 'Y08%' OR concept_code LIKE 'Y09%'))
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query15 ICD9(E96)/ICD10(X92-Y09) in observation', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query15 ICD9(E96)/ICD10(X92-Y09) in observation', 'result' : ''},
ignore_index = True)
df1
# # 16 PRC_16 Verify all ICD9(E95) codes used to specify Suicide are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E95%' )) )
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query16 ICD9(E95) in observation', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query16 ICD9(E95) in observation', 'result' : ''},
ignore_index = True)
df1
# # 17 PRC_17 Verify all ICD9(E95) codes used to specify Suicide are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE vocabulary_id='ICD9CM' AND concept_code LIKE 'E95%' )
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query17 ICD9(E95) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query17 ICD9(E95) in condition', 'result' : ''},
ignore_index = True)
df1
# # 18 PRC_18 Verify all ICD9(E928.0)/ICD10(X52) codes used to specify Prolonged stay in weightlessness are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E928.0' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'X52%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query18 ICD9(E928.0)/ICD10(X52) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
    df = df.append({'query' : 'Query18 ICD9(E928.0)/ICD10(X52) in condition', 'result' : ''},
ignore_index = True)
df1
# # 19 PRC_19 Verify all ICD9(E928.0)/ICD10(X52) codes used to specify Prolonged stay in weightlessness are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E928.0' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'X52%')
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query19 ICD9(E928.0)/ICD10(X52) in obs', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query19 ICD9(E928.0)/ICD10(X52) in obs', 'result' : ''},
ignore_index = True)
df1
# # 20 PRC_20 Verify all ICD9(E910)/ICD10(W65-W74) codes used to specify Drowning are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND ( concept_code LIKE 'E910%' ))
OR (vocabulary_id='ICD10CM' AND (concept_code LIKE 'W65%' OR concept_code LIKE 'W66%' OR
concept_code LIKE 'W67%' OR concept_code LIKE 'W68%' OR concept_code LIKE 'W69%' OR concept_code LIKE 'W70%' OR
concept_code LIKE 'W71%' OR concept_code LIKE 'W72%' OR concept_code LIKE 'W73%' OR concept_code LIKE 'W74%'))
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query20 ICD9(E910)/ICD10(W65-W74) in observation', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query20 ICD9(E910)/ICD10(W65-W74) in observation', 'result' : ''},
ignore_index = True)
df1
# # 21 PRC_21 Verify all ICD9(E910)/ICD10(W65-W74) codes used to specify Drowning are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E910%' ))
OR (vocabulary_id='ICD10CM' AND (concept_code LIKE 'W65%' OR concept_code LIKE 'W66%' OR
concept_code LIKE 'W67%' OR concept_code LIKE 'W68%' OR concept_code LIKE 'W69%' OR concept_code LIKE 'W70%' OR
concept_code LIKE 'W71%' OR concept_code LIKE 'W72%' OR concept_code LIKE 'W73%' OR concept_code LIKE 'W74%'))
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query21 ICD9(E910)/ICD10(W65-W74) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query21 ICD9(E910)/ICD10(W65-W74) in condition', 'result' : ''},
ignore_index = True)
df1
# # 22 PRC_22 Verify all ICD9(E913)/ICD10(T71) codes used to specify Suffocation are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E913%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'T71%' )
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
    df = df.append({'query' : 'Query22 ICD9(E913)/ICD10(T71) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
    df = df.append({'query' : 'Query22 ICD9(E913)/ICD10(T71) in condition', 'result' : ''},
ignore_index = True)
df1
# # 23 PRC_23 Verify all ICD9(E913)/ICD10(T71) codes used to specify Suffocation are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND (concept_code LIKE 'E913%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'T71%' )
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query23 ICD9(E913)/ICD10(T71) in observation', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query23 ICD9(E913)/ICD10(T71) in observation', 'result' : ''},
ignore_index = True)
df1
# # 24 PRC_24 Verify all ICD9(E80-E84)/ICD10(V) codes used to specify Vehicle accident are not generated/displayed as condition_source_value in the CONDITION_OCCURRENCE table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND
(concept_code LIKE 'E80%' OR concept_code LIKE 'E81%' OR concept_code LIKE 'E82%' OR
concept_code LIKE 'E83%' OR concept_code LIKE 'E84%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'V%' )
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.condition_occurrence` p1
JOIN ICD_suppressions p2
ON p1.condition_source_concept_id=p2.concept_id
WHERE condition_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query24 ICD9(E80-E84)/ICD10(V) in condition', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query24 ICD9(E80-E84)/ICD10(V) in condition', 'result' : ''},
ignore_index = True)
df1
# # 25 PRC_25 Verify all ICD9(E80-E84)/ICD10(V) codes used to specify Vehicle accident are not generated/displayed as observation_source_value in the OBSERVATION table
query = f'''
WITH ICD_suppressions AS (
SELECT concept_id
FROM `{project_id}.{deid_cdr}.concept`
WHERE (vocabulary_id='ICD9CM' AND
(concept_code LIKE 'E80%' OR concept_code LIKE 'E81%' OR concept_code LIKE 'E82%' OR
concept_code LIKE 'E83%' OR concept_code LIKE 'E84%' ))
OR (vocabulary_id='ICD10CM' AND concept_code LIKE 'V%' )
)
SELECT COUNT (*) AS n_row_not_pass
FROM `{project_id}.{deid_cdr}.observation` p1
JOIN ICD_suppressions p2
ON p1.observation_source_concept_id=p2.concept_id
WHERE observation_source_value IS NOT NULL
'''
df1=pd.read_gbq(query, dialect='standard')
if df1.loc[0].sum()==0:
df = df.append({'query' : 'Query25 ICD9(E80-E84)/ICD10(V) in observation', 'result' : 'PASS'},
ignore_index = True)
else:
df = df.append({'query' : 'Query25 ICD9(E80-E84)/ICD10(V) in observation', 'result' : ''},
ignore_index = True)
df1
# # Summary_Row_Suppression-ICD9/10
# any check that did not pass is highlighted in red below
df = df.mask(df.isin(['Null','']))
df.style.highlight_null(null_color='red').set_properties(**{'text-align': 'left'})
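# # Note: shared check pattern
# All 25 checks above repeat the same query/execute/append pattern, and
# DataFrame.append is deprecated (removed in pandas 2.0). The helper below is a
# minimal refactoring sketch -- an assumption-labeled suggestion, not part of the
# original notebook -- using pd.concat instead:
def run_check(label, check_query, results_df):
    """Run one suppression check and append a PASS/blank row to the summary."""
    check_result = pd.read_gbq(check_query, dialect='standard')
    status = 'PASS' if check_result.loc[0].sum() == 0 else ''
    row = pd.DataFrame([{'query': label, 'result': status}])
    return pd.concat([results_df, row], ignore_index=True)
# Hypothetical reuse with any of the query strings above, e.g.:
# df = run_check('Query8 ICD10(Y36) in obs', query, df)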
--- dataset record separator (quality-signal columns omitted) ---
next file: tb_rest_client/api/api_ce/auth_controller_api.py | repo: samson0v/python_tb_rest_client | license: Apache-2.0 | size: 36,168 bytes
# coding: utf-8
"""
ThingsBoard REST API
ThingsBoard open-source IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3-SNAPSHOT
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class AuthControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def activate_user_using_post(self, **kwargs): # noqa: E501
"""Activate User # noqa: E501
Checks the activation token and updates corresponding user password in the database. Now the user may start using his password to login. The response already contains the [JWT](https://jwt.io) activation and refresh tokens, to simplify the user activation flow and avoid asking user to input password again after activation. If token is valid, returns the object that contains [JWT](https://jwt.io/) access and refresh tokens. If token is not valid, returns '404 Bad Request'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.activate_user_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ActivateUserRequest body:
:param bool send_activation_mail: sendActivationMail
:return: JWTTokenPair
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.activate_user_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.activate_user_using_post_with_http_info(**kwargs) # noqa: E501
return data
def activate_user_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Activate User # noqa: E501
Checks the activation token and updates corresponding user password in the database. Now the user may start using his password to login. The response already contains the [JWT](https://jwt.io) activation and refresh tokens, to simplify the user activation flow and avoid asking user to input password again after activation. If token is valid, returns the object that contains [JWT](https://jwt.io/) access and refresh tokens. If token is not valid, returns '404 Bad Request'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.activate_user_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ActivateUserRequest body:
:param bool send_activation_mail: sendActivationMail
:return: JWTTokenPair
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'send_activation_mail'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method activate_user_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'send_activation_mail' in params:
query_params.append(('sendActivationMail', params['send_activation_mail'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/activate{?sendActivationMail}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JWTTokenPair', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_password_using_post(self, **kwargs): # noqa: E501
"""Change password for current User (changePassword) # noqa: E501
Change the password for the User which credentials are used to perform this REST API call. Be aware that previously generated [JWT](https://jwt.io/) tokens will be still valid until they expire. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_password_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ChangePasswordRequest body:
:return: ObjectNode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.change_password_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.change_password_using_post_with_http_info(**kwargs) # noqa: E501
return data
def change_password_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Change password for current User (changePassword) # noqa: E501
Change the password for the User which credentials are used to perform this REST API call. Be aware that previously generated [JWT](https://jwt.io/) tokens will be still valid until they expire. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_password_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ChangePasswordRequest body:
:return: ObjectNode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_password_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/auth/changePassword', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectNode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def check_activate_token_using_get(self, activate_token, **kwargs): # noqa: E501
"""Check Activate User Token (checkActivateToken) # noqa: E501
Checks the activation token and forwards user to 'Create Password' page. If token is valid, returns '303 See Other' (redirect) response code with the correct address of 'Create Password' page and same 'activateToken' specified in the URL parameters. If token is not valid, returns '409 Conflict'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_activate_token_using_get(activate_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str activate_token: The activate token string. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.check_activate_token_using_get_with_http_info(activate_token, **kwargs) # noqa: E501
else:
(data) = self.check_activate_token_using_get_with_http_info(activate_token, **kwargs) # noqa: E501
return data
def check_activate_token_using_get_with_http_info(self, activate_token, **kwargs): # noqa: E501
"""Check Activate User Token (checkActivateToken) # noqa: E501
Checks the activation token and forwards user to 'Create Password' page. If token is valid, returns '303 See Other' (redirect) response code with the correct address of 'Create Password' page and same 'activateToken' specified in the URL parameters. If token is not valid, returns '409 Conflict'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_activate_token_using_get_with_http_info(activate_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str activate_token: The activate token string. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['activate_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_activate_token_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'activate_token' is set
if ('activate_token' not in params or
params['activate_token'] is None):
raise ValueError("Missing the required parameter `activate_token` when calling `check_activate_token_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'activate_token' in params:
query_params.append(('activateToken', params['activate_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/activate{?activateToken}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def check_reset_token_using_get(self, reset_token, **kwargs): # noqa: E501
"""Check password reset token (checkResetToken) # noqa: E501
Checks the password reset token and forwards user to 'Reset Password' page. If token is valid, returns '303 See Other' (redirect) response code with the correct address of 'Reset Password' page and same 'resetToken' specified in the URL parameters. If token is not valid, returns '409 Conflict'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_reset_token_using_get(reset_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str reset_token: The reset token string. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.check_reset_token_using_get_with_http_info(reset_token, **kwargs) # noqa: E501
else:
(data) = self.check_reset_token_using_get_with_http_info(reset_token, **kwargs) # noqa: E501
return data
def check_reset_token_using_get_with_http_info(self, reset_token, **kwargs): # noqa: E501
"""Check password reset token (checkResetToken) # noqa: E501
Checks the password reset token and forwards user to 'Reset Password' page. If token is valid, returns '303 See Other' (redirect) response code with the correct address of 'Reset Password' page and same 'resetToken' specified in the URL parameters. If token is not valid, returns '409 Conflict'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_reset_token_using_get_with_http_info(reset_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str reset_token: The reset token string. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['reset_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_reset_token_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'reset_token' is set
if ('reset_token' not in params or
params['reset_token'] is None):
raise ValueError("Missing the required parameter `reset_token` when calling `check_reset_token_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'reset_token' in params:
query_params.append(('resetToken', params['reset_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/resetPassword{?resetToken}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_password_policy_using_get(self, **kwargs): # noqa: E501
"""Get the current User password policy (getUserPasswordPolicy) # noqa: E501
API call to get the password policy for the password validation form(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_password_policy_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserPasswordPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_password_policy_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_password_policy_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_user_password_policy_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Get the current User password policy (getUserPasswordPolicy) # noqa: E501
API call to get the password policy for the password validation form(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_password_policy_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserPasswordPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_password_policy_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/userPasswordPolicy', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserPasswordPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_using_get(self, **kwargs): # noqa: E501
"""Get current User (getUser) # noqa: E501
Get the information about the User which credentials are used to perform this REST API call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: User
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_user_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_user_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Get current User (getUser) # noqa: E501
Get the information about the User which credentials are used to perform this REST API call. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: User
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/auth/user', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='User', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def logout_using_post(self, **kwargs): # noqa: E501
"""Logout (logout) # noqa: E501
Special API call to record the 'logout' of the user to the Audit Logs. Since platform uses [JWT](https://jwt.io/), the actual logout is the procedure of clearing the [JWT](https://jwt.io/) token on the client side. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logout_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.logout_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.logout_using_post_with_http_info(**kwargs) # noqa: E501
return data
def logout_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Logout (logout) # noqa: E501
Special API call to record the 'logout' of the user to the Audit Logs. Since platform uses [JWT](https://jwt.io/), the actual logout is the procedure of clearing the [JWT](https://jwt.io/) token on the client side. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logout_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method logout_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/auth/logout', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def request_reset_password_by_email_using_post(self, **kwargs): # noqa: E501
"""Request reset password email (requestResetPasswordByEmail) # noqa: E501
Request to send the reset password email if the user with specified email address is present in the database. Always return '200 OK' status for security purposes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_reset_password_by_email_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ResetPasswordEmailRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.request_reset_password_by_email_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.request_reset_password_by_email_using_post_with_http_info(**kwargs) # noqa: E501
return data
def request_reset_password_by_email_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Request reset password email (requestResetPasswordByEmail) # noqa: E501
Request to send the reset password email if the user with specified email address is present in the database. Always return '200 OK' status for security purposes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_reset_password_by_email_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ResetPasswordEmailRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method request_reset_password_by_email_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/resetPasswordByEmail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def reset_password_using_post(self, **kwargs): # noqa: E501
"""Reset password (resetPassword) # noqa: E501
Checks the password reset token and updates the password. If token is valid, returns the object that contains [JWT](https://jwt.io/) access and refresh tokens. If token is not valid, returns '404 Bad Request'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_password_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ResetPasswordRequest body:
:return: JWTTokenPair
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.reset_password_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.reset_password_using_post_with_http_info(**kwargs) # noqa: E501
return data
def reset_password_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Reset password (resetPassword) # noqa: E501
Checks the password reset token and updates the password. If token is valid, returns the object that contains [JWT](https://jwt.io/) access and refresh tokens. If token is not valid, returns '404 Bad Request'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_password_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ResetPasswordRequest body:
:return: JWTTokenPair
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method reset_password_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/noauth/resetPassword', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JWTTokenPair', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
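# A hedged usage sketch for AuthControllerApi above -- not part of the generated
# file. It assumes the standard swagger-codegen package layout
# (tb_rest_client.configuration.Configuration, ApiClient(configuration)); the
# host URL is a placeholder, not a real deployment.
if __name__ == '__main__':
    from tb_rest_client.configuration import Configuration

    configuration = Configuration()
    configuration.host = 'http://localhost:8080'  # hypothetical ThingsBoard host

    auth_api = AuthControllerApi(api_client=ApiClient(configuration))
    # Public (no-auth) endpoint: fetch the password policy used by signup forms.
    print(auth_api.get_user_password_policy_using_get())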
--- dataset record separator (quality-signal columns omitted) ---
next file: api_1.4/containerd/services/leases/v1/leases_pb2_grpc.py | repo: englandbaron/pycontainerd | license: Apache-2.0 | size: 11,778 bytes
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from containerd.services.leases.v1 import leases_pb2 as containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class LeasesStub(object):
"""Leases service manages resources leases within the metadata store.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Create = channel.unary_unary(
'/containerd.services.leases.v1.Leases/Create',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateResponse.FromString,
)
self.Delete = channel.unary_unary(
'/containerd.services.leases.v1.Leases/Delete',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.List = channel.unary_unary(
'/containerd.services.leases.v1.Leases/List',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResponse.FromString,
)
self.AddResource = channel.unary_unary(
'/containerd.services.leases.v1.Leases/AddResource',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.AddResourceRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteResource = channel.unary_unary(
'/containerd.services.leases.v1.Leases/DeleteResource',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteResourceRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListResources = channel.unary_unary(
'/containerd.services.leases.v1.Leases/ListResources',
request_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesRequest.SerializeToString,
response_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesResponse.FromString,
)
class LeasesServicer(object):
"""Leases service manages resources leases within the metadata store.
"""
def Create(self, request, context):
"""Create creates a new lease for managing changes to metadata. A lease
can be used to protect objects from being removed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Delete(self, request, context):
"""Delete deletes the lease and makes any unreferenced objects created
during the lease eligible for garbage collection if not referenced
or retained by other resources during the lease.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def List(self, request, context):
"""List lists all active leases, returning the full list of
leases and optionally including the referenced resources.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddResource(self, request, context):
"""AddResource references the resource by the provided lease.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteResource(self, request, context):
"""DeleteResource dereferences the resource by the provided lease.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListResources(self, request, context):
"""ListResources lists all the resources referenced by the lease.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_LeasesServicer_to_server(servicer, server):
rpc_method_handlers = {
'Create': grpc.unary_unary_rpc_method_handler(
servicer.Create,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateRequest.FromString,
response_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateResponse.SerializeToString,
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'List': grpc.unary_unary_rpc_method_handler(
servicer.List,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListRequest.FromString,
response_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResponse.SerializeToString,
),
'AddResource': grpc.unary_unary_rpc_method_handler(
servicer.AddResource,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.AddResourceRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'DeleteResource': grpc.unary_unary_rpc_method_handler(
servicer.DeleteResource,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteResourceRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'ListResources': grpc.unary_unary_rpc_method_handler(
servicer.ListResources,
request_deserializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesRequest.FromString,
response_serializer=containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'containerd.services.leases.v1.Leases', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Leases(object):
"""Leases service manages resources leases within the metadata store.
"""
@staticmethod
def Create(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/Create',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateRequest.SerializeToString,
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.CreateResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Delete(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/Delete',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def List(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/List',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListRequest.SerializeToString,
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def AddResource(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/AddResource',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.AddResourceRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteResource(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/DeleteResource',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.DeleteResourceRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListResources(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/containerd.services.leases.v1.Leases/ListResources',
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesRequest.SerializeToString,
containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2.ListResourcesResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
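# A hedged usage sketch for the client stub above -- not part of the generated
# file. It assumes containerd's gRPC API is reachable over the default local
# socket; real calls also require a 'containerd-namespace' metadata header,
# omitted here for brevity.
if __name__ == '__main__':
    leases_pb2 = containerd_dot_services_dot_leases_dot_v1_dot_leases__pb2
    with grpc.insecure_channel('unix:///run/containerd/containerd.sock') as channel:
        stub = LeasesStub(channel)
        created = stub.Create(leases_pb2.CreateRequest(id='example-lease'))
        print('created lease:', created.lease.id)
        for lease in stub.List(leases_pb2.ListRequest()).leases:
            print('active lease:', lease.id)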
--- dataset record separator (quality-signal columns omitted) ---
next file: models/todo.py | repo: Feng00111/todo-flask | license: MIT | size: 3,360 bytes
import time
from models import Model
# For our Todo data
# we need to support 4 operations
"""
C create: create data
R read:   read data
U update: update data
D delete: delete data
Todo.new() creates a todo
"""
class Todo(Model):
def __init__(self, form, user_id=-1):
self.id = form.get('id', None)
self.title = form.get('title', '')
self.completed = False
# self.deleted = False
        # Linked to other data via user_id, which identifies the owning user instance
self.user_id = int(form.get('user_id', user_id))
        # Add created and updated timestamps
self.ct = form.get('created_time', None)
self.ut = form.get('updated_time', None)
if self.ct is None:
self.ct = int(time.time())
self.ut = self.ct
def is_owner(self, id):
return self.user_id == id
# def ct(self):
# formats = '%H:%M:%S'
# value = time.localtime(self.ct)
# dt = time.strftime(formats, value)
# return dt
@classmethod
def update(cls, id, form):
t = cls.find(id)
valid_names = [
'title',
'completed',
'deleted',
]
for key in form:
            # Only update fields that are explicitly whitelisted
if key in valid_names:
setattr(t, key, form[key])
        # Refresh the updated timestamp
t.ut = int(time.time())
t.save()
@classmethod
def complete(cls, id, completed):
"""
用法很方便 也可用于 deleted 字段(逻辑删除)
Todo.complete(1, True)
Todo.complete(2, False)
"""
t = cls.find(id)
t.completed = completed
t.save()
return t
@classmethod
def new(cls, form, user_id=-1):
"""
创建并保存一个 todo 并且返回它
Todo.new({'title': '吃饭'})
:param form: 一个字典 包含了 todo 的数据
:param user_id: 一个int 包含了 user_id 的数据
:return: 创建的 todo 实例
"""
# 下面一行相当于 t = Todo(form)
t = cls(form, user_id)
t.save()
return t
class TodoApi(Model):
def __init__(self, form, user_id=-1):
self.id = form.get('id', None)
self.todothing = form.get('todothing', '')
self.time = form.get('time', '')
# 和别的数据关联的方式, 用 user_id 表明拥有它的 user 实例
self.user_id = int(form.get('user_id', user_id))
# 添加创建和修改时间
self.ct = form.get('created_time', None)
self.ut = form.get('updated_time', None)
if self.ct is None:
self.ct = int(time.time())
self.ut = self.ct
def is_owner(self, id):
return self.user_id == id
def creat_time(self):
formats = '%H:%M:%S'
value = time.localtime(self.ct)
dt = time.strftime(formats, value)
return dt
@classmethod
def update(cls, id, form):
t = cls.find(id)
valid_names = [
'todothing',
'time',
'deleted',
]
for key in form:
# 这里只应该更新我们想要更新的东西
if key in valid_names:
setattr(t, key, form[key])
# 修改更新时间
t.ut = int(time.time())
t.save()
@classmethod
def new(cls, form, user_id=-1):
"""
创建并保存一个 todo 并且返回它
Todo.new({'title': '吃饭'})
:param form: 一个字典 包含了 todo 的数据
:param user_id: 一个int 包含了 user_id 的数据
:return: 创建的 todo 实例
"""
# 下面一行相当于 t = Todo(form)
t = cls(form, user_id)
t.save()
return t
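# Usage sketch (hypothetical values; assumes the Model base class provides
# the find() and save() methods used above):
#   t = Todo.new({'title': 'buy milk'}, user_id=1)    # C: create
#   t = Todo.find(t.id)                               # R: read
#   Todo.update(t.id, {'title': 'buy oat milk'})      # U: update
#   Todo.complete(t.id, True)                         # mark as completed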
| 24.888889 | 56 | 0.511012 | 429 | 3,360 | 3.911422 | 0.216783 | 0.071514 | 0.035757 | 0.026222 | 0.772348 | 0.772348 | 0.772348 | 0.772348 | 0.772348 | 0.772348 | 0 | 0.003273 | 0.363393 | 3,360 | 134 | 57 | 25.074627 | 0.781206 | 0.210119 | 0 | 0.72 | 0 | 0 | 0.055347 | 0 | 0 | 0 | 0 | 0.104478 | 0 | 1 | 0.133333 | false | 0 | 0.026667 | 0.026667 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 9c48fe7c4930ebe25341b33e377c463e64f6687e | 39 | py | Python | run.py | namaggarwal/flask-upload | 92813bb75379f8e74bfcde33ae66fd2cc80beb53 | ["MIT"] | 1 | 2019-07-25T01:21:45.000Z | 2019-07-25T01:21:45.000Z | run.py | namaggarwal/flask-upload | 92813bb75379f8e74bfcde33ae66fd2cc80beb53 | ["MIT"] | null | null | null | run.py | namaggarwal/flask-upload | 92813bb75379f8e74bfcde33ae66fd2cc80beb53 | ["MIT"] | null | null | null |
from app import app
app.run('0.0.0.0')
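# Note: binding to '0.0.0.0' makes the development server reachable from other
# hosts, not just localhost; a port can also be passed, e.g. (hypothetical):
#   app.run('0.0.0.0', port=5000)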
| 13 | 19 | 0.666667 | 10 | 39 | 2.6 | 0.5 | 0.230769 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 0.128205 | 39 | 3 | 20 | 13 | 0.647059 | 0 | 0 | 0 | 0 | 0 | 0.175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
| 9c54843e0b7fd666bcd56b5cde49a686ff210990 | 86,572 | py | Python | nextorch/plotting.py | wangyifan411/nextorch | 3777f3d72969a23e82a68401a2f54b0e68e30da4 | ["MIT"] | 18 | 2021-06-30T06:31:01.000Z | 2022-02-15T18:35:44.000Z | nextorch/plotting.py | wangyifan411/nextorch | 3777f3d72969a23e82a68401a2f54b0e68e30da4 | ["MIT"] | 1 | 2021-12-07T06:15:20.000Z | 2021-12-07T06:15:20.000Z | nextorch/plotting.py | wangyifan411/nextorch | 3777f3d72969a23e82a68401a2f54b0e68e30da4 | ["MIT"] | 2 | 2021-06-30T15:48:10.000Z | 2021-10-06T16:27:18.000Z |
"""
Creates 1-dimensional, 2-dimensional and 3-dimensional visualizations
The plots are rendered using matplotlib_ as a backend
.. _matplotlib: https://matplotlib.org/stable/index.html
"""
import os, sys
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import cm
from matplotlib.axes._axes import Axes
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.axes_grid1 import make_axes_locatable
import numpy as np
import torch
from torch import Tensor
from botorch.acquisition.acquisition import AcquisitionFunction
from botorch.models.model import Model
from typing import Optional, TypeVar, Union, Tuple, List
from nextorch.utils import ArrayLike1d, MatrixLike2d, create_full_X_test_1d, create_full_X_test_2d
from nextorch.utils import tensor_to_np, standardize_X, transform_Y_mesh_2d, unitscale_xv
from nextorch.bo import eval_acq_func, eval_objective_func, \
model_predict, model_predict_real, Experiment, EHVIMOOExperiment, WeightedMOOExperiment
# # Set matplotlib default values
# font = {'size' : 20}
# matplotlib.rc('font', **font)
# matplotlib.rcParams['axes.linewidth'] = 1.5
# matplotlib.rcParams['xtick.major.size'] = 8
# matplotlib.rcParams['xtick.major.width'] = 2
# matplotlib.rcParams['ytick.major.size'] = 8
# matplotlib.rcParams['ytick.major.width'] = 2
# matplotlib.rcParams["figure.dpi"] = 100
# matplotlib.rcParams['savefig.dpi'] = 600
# Set global plotting variables
colormap = cm.jet
figformat = 'png'
backgroundtransparency = False
#%% Parity plots
def parity(
y1: MatrixLike2d,
y2: MatrixLike2d,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
Parameters
----------
y1 : MatrixLike2d
Ground truth values
y2 : MatrixLike2d
Model predicted values
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[Union[str, int]], optional
Iteration number to add to the figure name
by default ''
"""
y1 = np.squeeze(tensor_to_np(y1))
y2 = np.squeeze(tensor_to_np(y2))
fig, ax = plt.subplots(figsize=(6,6))
ax.scatter(y1, y2, s=60, alpha = 0.5)
ax.set_xlabel("Ground Truth")
ax.set_ylabel("Prediction")
lims = [
np.min([y1.min(), y2.min()]), # min of both axes
np.max([y1.max(), y2.max()]), # max of both axes
]
# number of sections in the axis
nsections = 5
# now plot both limits against each other
ax.plot(lims, lims, 'k--', alpha=0.75, zorder=0)
ax.set_xlim(lims)
ax.set_ylim(lims)
ax.set_xticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_xticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, 'parity_'+ str(i_iter) + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def parity_exp(
Exp: Experiment,
save_fig: Optional[bool] = False,
design_name: Optional[Union[str, int]] = 'final'):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
Using Experiment object
Parameters
----------
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
design_name : Optional[Union[str, int]], optional
Design name to add to the figure name
by default 'final'
"""
Y_real_pred = Exp.validate_training(show_confidence=False)
parity(y1=Exp.Y_real,
y2=Y_real_pred,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter = design_name)
def parity_with_ci(
y1: MatrixLike2d,
y2: MatrixLike2d,
y2_lower: MatrixLike2d,
y2_upper: MatrixLike2d,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
with predicted confidence interval as error bars
Parameters
----------
y1 : MatrixLike2d
Ground truth values
y2 : MatrixLike2d
Model predicted values
y2_lower: MatrixLike2d
y2_upper: MatrixLike2d
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[Union[str, int]], optional
Iteration number to add to the figure name
by default ''
"""
y1 = np.squeeze(tensor_to_np(y1))
y2 = np.squeeze(tensor_to_np(y2))
y2_lower = np.squeeze(tensor_to_np(y2_lower))
y2_upper = np.squeeze(tensor_to_np(y2_upper))
# calculate the error margin
y2err = np.row_stack((np.abs(y2_lower - y2), np.abs(y2_upper - y2)))
fig, ax = plt.subplots(figsize=(6,6))
ax.errorbar(y1, y2, yerr = y2err, fmt = 'o', capsize = 2, alpha = 0.5)
ax.set_xlabel("Ground Truth")
ax.set_ylabel("Prediction")
lims = [
np.min([y1.min(), y2.min()]), # min of both axes
np.max([y1.max(), y2.max()]), # max of both axes
]
# number of sections in the axis
nsections = 5
# now plot both limits against each other
ax.plot(lims, lims, 'k--', alpha=0.75, zorder=0)
ax.set_xlim(lims)
ax.set_ylim(lims)
ax.set_xticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_xticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, 'parity_w_ci_'+ str(i_iter) + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def parity_with_ci_exp(Exp: Experiment,
save_fig: Optional[bool] = False,
design_name: Optional[Union[str, int]] = 'final'):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
with predicted confidence interval as error bars
Using Experiment object
Parameters
----------
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
design_name : Optional[Union[str, int]], optional
Design name to add to the figure name
by default 'final'
"""
Y_real_pred, Y_lower_real_pred, Y_upper_real_pred = \
Exp.validate_training(show_confidence=True)
parity_with_ci(y1=Exp.Y_real,
y2=Y_real_pred,
y2_lower=Y_lower_real_pred,
y2_upper=Y_upper_real_pred,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter = design_name)
#%% Discovery plots
def opt_per_trial(
Ys: Union[list, ArrayLike1d],
maximize: Optional[bool] = True,
Y_real_range: Optional[ArrayLike1d] = None,
Y_name: Optional[str] = None,
log_flag: Optional[bool] = False,
design_names: Optional[Union[str, List[str]]] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
):
"""Discovery plot
show the optimum value performance versus the trial number
i.e. the index of training data
Parameters
----------
Ys : Union[list, ArrayLike1d]
Response of each design in a real scale
maximize : Optional[bool], optional
by default True, maximize the objective function
Otherwise False, minimize the objective function
Y_real_range : ArrayLike1d
Ranges of the response, [lb, rb]
to show on the plot, by default None
Y_name : Optional[str], optional
Name of Y variable, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale, by default False
design_names : Optional[List[str]], optional
Names of the designs, by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
"""
# if only one set of design is input, convert to list
if not isinstance(Ys, list):
Ys = [Ys]
# set default design names if none
if design_names is None:
design_names = ['design' + str(i) for i in range(len(Ys))]
if not isinstance(design_names, list):
design_names = [design_names]
# Set Y_name in file name
if Y_name is None:
Y_name = ''
# set the file name
# if only one set of design, use that design name
# else use comparison in the name
file_name = 'opt_per_trial_' + Y_name + '_'
if not isinstance(design_names, list):
file_name += design_names
else:
file_name += 'comparison'
# set the colors
colors = colormap(np.linspace(0, 1, len(Ys)))
# make the plot
fig,ax = plt.subplots(figsize=(8, 6))
for yi, ci, name_i in zip(Ys, colors, design_names):
if log_flag:
yi = np.log10(abs(yi))
if maximize:
opt_yi = np.maximum.accumulate(yi)
else:
opt_yi = np.minimum.accumulate(yi)
ax.plot(np.arange(len(yi)), opt_yi, '-o', color = ci, \
label = name_i, markersize=5, linewidth = 3, markerfacecolor="None")
if Y_real_range is not None:
ax.set_ylim(Y_real_range)
ax.set_xlabel('Trial Index')
ax.set_ylabel('Best Observed '+ Y_name)
ax.legend()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, file_name + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def opt_per_trial_exp(
Exp: Experiment,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
save_fig: Optional[bool] = False):
"""Discovery plot
show the optimum value performance versus the trial number
i.e. the index of training data
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
Y_real_range : ArrayLike1d
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale, by default False
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
opt_per_trial(Ys=Exp.Y_real,
maximize=Exp.maximize,
Y_name=Exp.Y_names[0],
Y_real_range=Y_real_range,
log_flag=log_flag,
save_fig=save_fig,
save_path=Exp.exp_path,
design_names='final')
#%% Functions for 1 dimensional systems
def acq_func_1d(
acq_func: AcquisitionFunction,
X_test: MatrixLike2d,
n_dim: Optional[int] = 1,
X_ranges: Optional[MatrixLike2d] = None,
x_index: Optional[int] = 0,
X_train: Optional[MatrixLike2d] = None,
X_new: Optional[MatrixLike2d] = None,
X_names: Optional[str] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot 1-dimensional acquision function
at the given dimension defined by x_index
Parameters
----------
acq_func : 'botorch.acquisition.AcquisitionFunction'_
the acquisition function object
X_test : MatrixLike2d
Test data points for plotting
n_dim : Optional[int], optional
Dimension of X, i.e., the number of columns
by default 1
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
x_index : Optional[int], optional
index of the x variable, by default 0
X_train : Optional[MatrixLike2d], optional
Training data points, by default None
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
X_names : Optional[str], optional
Name of the X variable shown as the x-label
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[Union[str, int]], optional
Iteration index to add to the figure name
by default ''
.._'botorch.acquisition.AcquisitionFunction': https://botorch.org/api/acquisition.html
"""
# Set default axis names
if X_names is None:
if (n_dim == 1): X_name = 'x'
else: X_name = 'x' + str(x_index + 1)
else:
X_name = X_names[x_index]
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
# compute acquisition function values at X_test and X_train
acq_val_test = eval_acq_func(acq_func, X_test, return_type='np')
# Select the given dimension
x_test_1d = np.squeeze(tensor_to_np(X_test)[:, x_index])
# Initialize plot
fig, ax = plt.subplots(figsize=(12, 6))
ax.plot(x_test_1d, acq_val_test, 'b-', label = 'Acquisition')
# Plot training points as black
# Only for 1-dimensional system
if (n_dim == 1) and (X_train is not None):
acq_val_train = eval_acq_func(acq_func, X_train, return_type='np')
x_train = np.squeeze(tensor_to_np(X_train)[:, x_index])
ax.scatter(x_train, acq_val_train, s = 120, c= 'k', marker = '*', label = 'Initial Data')
# Plot the new infill points as red stars
# Only for 1-dimensional system
if (n_dim == 1) and (X_new is not None):
acq_val_new = eval_acq_func(acq_func, X_new, return_type='np')
x_new = np.squeeze(tensor_to_np(X_new)[:, x_index])
ax.scatter(x_new, acq_val_new, s = 120, c ='r', marker = '*', label = 'Infill Data')
ax.ticklabel_format(style = 'sci', axis = 'y' )#, scilimits = (-2,2) )
ax.set_xlabel(X_name)
xlim_plot = list(ax.set_xlim((0,1)))
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[x_index], n_tick_sections))
ax.set_ylabel(r'$ \alpha$')
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, 'acq_func_i'+ str(i_iter) + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def acq_func_1d_exp(Exp: Experiment,
X_new: Optional[MatrixLike2d] = None,
x_index: Optional[int] = 0,
fixed_values: Optional[Union[ArrayLike1d, float]] = None,
fixed_values_real: Optional[Union[ArrayLike1d, float]] = None,
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False):
"""Plot 1-dimensional acquision function
at the given dimension defined by x_index
Using Experiment object
Parameters
----------
Exp : Experiment
Experiment object
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
x_index : Optional[int], optional
index of the x variable, by default 0
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default None
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default None
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
mesh_size : int, optional
mesh size, by default 41
"""
# Create 1d mesh test points
X_test_1d = create_full_X_test_1d(X_ranges=Exp.X_ranges,
x_index=x_index,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
acq_func_1d(acq_func = Exp.acq_func_current,
X_test=X_test_1d,
n_dim=Exp.n_dim,
X_ranges=Exp.X_ranges,
x_index=x_index,
X_train=Exp.X,
X_new=X_new,
X_names=Exp.X_names,
save_fig= save_fig,
save_path=Exp.exp_path,
i_iter = Exp.n_points - Exp.n_points_init)
def response_1d(
model: Model,
X_test: MatrixLike2d,
n_dim: Optional[int] = 1,
X_ranges: Optional[MatrixLike2d] = None,
x_index: Optional[int] = 0,
Y_test: Optional[MatrixLike2d] = None,
X_train: Optional[MatrixLike2d] = None,
Y_train: Optional[MatrixLike2d] = None,
X_new: Optional[MatrixLike2d] = None,
Y_new: Optional[MatrixLike2d] = None,
negate_Y: Optional[bool] = False,
plot_real: Optional[bool] = False,
Y_mean: Optional[MatrixLike2d] = None,
Y_std: Optional[MatrixLike2d] = None,
X_names: Optional[str] = None,
Y_name: Optional[str] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot response values
at the given dimension defined by x_index
Input X variables are in a unit scale and
Input Y variables are in a real scale
Parameters
----------
model : 'botorch.models.model.Model'_
A GP model
X_test : MatrixLike2d
Test data points for plotting
n_dim : Optional[int], optional
Dimension of X, i.e., the number of columns
by default 1
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
x_index : Optional[int], optional
index of the x variable, by default 0
Y_test : Optional[MatrixLike2d], optional
Test Y data if the objective function is known,
by default None
X_train : Optional[MatrixLike2d], optional
Training X data points, by default None
Y_train : Optional[MatrixLike2d], optional
Training Y data points, by default None
X_new : Optional[MatrixLike2d], optional
The next X data point, i.e the infill points,
by default None
Y_new : Optional[MatrixLike2d], optional
The next Y data point, i.e the infill points,
by default None
plot_real : Optional[bool], optional
if true plot in the real scale for Y,
by default False
Y_mean : MatrixLike2d
The mean of initial Y set
Y_std : MatrixLike2d
The std of initial Y set
X_names: Optional[str], optional
Names of X variables shown as the x-label
Y_name: Optional[str], optional
Name of the Y variable shown as the y-label
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[str], optional
Iteration number to add to the figure name
by default ''
Raises
------
ValueError
if X_train is provided but Y_train is not
ValueError
if X_new is provided but Y_new is not
ValueError
if plotting in the standardized scale but Y_mean or Y_std is not provided
:_'botorch.models.model.Model': https://botorch.org/api/models.html
"""
# handle the edge cases
if (X_train is not None) and (Y_train is None):
raise ValueError("Plot X_train, must also input Y_train")
if (X_new is not None) and (Y_new is None):
raise ValueError("Plot X_new, must also input Y_new")
if not plot_real and (Y_mean is None or Y_std is None):
raise ValueError("Plot in the standard scale, must supply the mean and std of Y set")
# Set default axis names
if X_names is None:
if (n_dim == 1): X_name = 'x'
else: X_name = 'x' + str(x_index + 1)
else:
X_name = X_names[x_index]
# Set default axis names
if Y_name is None:
Y_name = 'y'
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
if plot_real: # Y in a real scale
Y_test_pred, Y_test_lower_pred, Y_test_upper_pred = model_predict_real(model=model,
X_test=X_test,
Y_mean=Y_mean,
Y_std=Y_std,
return_type= 'np',
negate_Y=negate_Y)
else: # Y in a standardized scale
Y_test = standardize_X(Y_test, Y_mean, Y_std, return_type= 'np') #standardize Y_test
Y_train = standardize_X(Y_train, Y_mean, Y_std, return_type= 'np')
Y_new = standardize_X(Y_new, Y_mean, Y_std, return_type= 'np')
Y_test_pred, Y_test_lower_pred, Y_test_upper_pred = model_predict(model=model,
X_test=X_test,
return_type= 'np',
negate_Y=negate_Y)
# Select the given dimension
x_test_1d = tensor_to_np(X_test)[:, x_index]
# reduce the dimension to 1d arrays
y_test_pred = Y_test_pred
y_test_lower_pred = Y_test_lower_pred
y_test_upper_pred = Y_test_upper_pred
# Initialize plot
fig, ax = plt.subplots(figsize=(12, 6))
# Plot model predicted posterior means as blue line
ax.plot(x_test_1d, y_test_pred, 'b', label = 'Posterior Mean')
# Shade between the lower and upper confidence bounds
ax.fill_between(x_test_1d, y_test_lower_pred, y_test_upper_pred, alpha=0.5, label = 'Confidence')
# Plot the ground truth Y_test if provided
# Only for 1-dimensional system
if (n_dim == 1) and (Y_test is not None):
y_test = np.squeeze(tensor_to_np(Y_test))
ax.plot(x_test_1d, y_test, 'k--', label = 'Objective f(x)')
# Plot training points as black stars
# Only for 1-dimensional system
if (n_dim == 1) and (X_train is not None):
x_train = np.squeeze(tensor_to_np(X_train)[:, x_index])
y_train = np.squeeze(tensor_to_np(Y_train))
ax.scatter(x_train, y_train, s =120, c= 'k', marker = '*', label = 'Initial Data')
# Plot the new infill points as red stars
# Only for 1-dimensional system
if (n_dim == 1) and (X_new is not None):
x_new = np.squeeze(tensor_to_np(X_new)[:, x_index])
y_new = np.squeeze(tensor_to_np(Y_new))
ax.scatter(x_new, y_new, s = 120, c = 'r', marker = '*', label = 'Infill Data')
ax.set_xlabel(X_name)
xlim_plot = list(ax.set_xlim(0, 1))
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[x_index], n_tick_sections))
ax.set_ylabel(Y_name)
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, 'objective_func_i'+ str(i_iter) + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def response_1d_exp(
Exp: Experiment,
X_new: Optional[MatrixLike2d] = None,
Y_new: Optional[MatrixLike2d] = None,
x_index: Optional[int] = 0,
y_index: Optional[int] = 0,
fixed_values: Optional[Union[ArrayLike1d, float]] = None,
fixed_values_real: Optional[Union[ArrayLike1d, float]] = None,
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
plot_real: Optional[bool] = False,
save_fig: Optional[bool] = False):
"""Plot reponse valus
at the given dimension defined by x_index
using Experiment object
Parameters
----------
Exp : Experiment
Experiment object
X_new : Optional[MatrixLike2d], optional
The next X data point, i.e the infill points,
by default None
Y_new : Optional[MatrixLike2d], optional
The next Y data point, i.e the infill points,
by default None
x_index : Optional[int], optional
index of the x variable, by default 0
y_index : Optional[int], optional
index of the y variable, by default 0
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default None
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default None
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
mesh_size : int, optional
mesh size, by default 41
plot_real : Optional[bool], optional
if true plot in the real scale for Y,
by default False
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 1d mesh test points
X_test = create_full_X_test_1d(X_ranges=Exp.X_ranges,
x_index=x_index,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# if no Y_test input, generate Y_test from objective function
Y_test = None
if Exp.objective_func is not None:
Y_test = eval_objective_func(X_test, Exp.X_ranges, Exp.objective_func)
# if no Y_new input, generate Y_test from objective function
if (Exp.objective_func is not None) and (Y_new is None):
Y_new = eval_objective_func(X_new, Exp.X_ranges, Exp.objective_func)
response_1d(model = Exp.model,
X_test = X_test,
n_dim=Exp.n_dim,
x_index=x_index,
X_ranges=Exp.X_ranges,
Y_test = Y_test,
X_train = Exp.X,
Y_train = Exp.Y_real, #be sure to use Y_real
X_new = X_new,
Y_new = Y_new,
negate_Y = Exp.negate_Y,
plot_real = plot_real,
Y_mean = Exp.Y_mean,
Y_std= Exp.Y_std,
X_names=Exp.X_names,
Y_name=Exp.Y_names[y_index],
save_fig= save_fig,
save_path=Exp.exp_path,
i_iter = Exp.n_points - Exp.n_points_init)
#%% Functions for 2 dimensional systems on sampling
def set_axis_values(
xi_range: ArrayLike1d,
n_sections: Optional[int] = 2,
decimals: Optional[int] = 2
) -> ArrayLike1d:
"""Divide xi_range into n_sections
Parameters
----------
xi_range : ArrayLike1d
range of x, [left bound, right bound]
n_sections : Optional[int], optional
number of sections, by default 2
decimals : Optional[int], optional
number of decimal places to keep, by default 2
Returns
-------
axis_values: ArrayLike1d
axis values with rounding up
Number of values is n_sections + 1
"""
lb = xi_range[0]
rb = xi_range[1]
axis_values = np.linspace(lb, rb, n_sections+1, endpoint = True)
axis_values = np.around(axis_values, decimals = decimals)
return axis_values
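# Example: dividing the range [0, 10] into 5 sections yields 6 tick values:
#   set_axis_values([0, 10], n_sections=5)  # -> array([ 0.,  2.,  4.,  6.,  8., 10.])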
def sampling_2d(
Xs: Union[MatrixLike2d, List[MatrixLike2d]],
X_ranges: Optional[MatrixLike2d] = None,
x_indices: Optional[List[int]] = [0, 1],
X_names: Optional[List[str]] = None,
design_names: Optional[Union[str, List[str]]] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
):
"""Plot sampling plan(s) in 2 dimensional space
X must be 2 dimensional
Parameters
----------
Xs : Union[MatrixLike2d, List[MatrixLike2d]]
The set of sampling plans,
Can be a list of matrices or one matrix
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
X_names: Optional[List[str]], optional
Names of X variables shown as x,y-labels
design_names : Optional[List[str]], optional
Names of the designs, by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
"""
# if only one set of design is input, convert to list
if not isinstance(Xs, list):
Xs = [Xs]
# set default design names if none
if design_names is None:
design_names = ['design' + str(i) for i in range(len(Xs))]
# set the file name
# if only one set of design, use that design name
# else use comparison in the name
file_name = 'sampling_2d_'
if not isinstance(design_names, list):
file_name += design_names
else:
file_name += 'comparison'
# Extract two variable indices for plotting
x_indices = sorted(x_indices)
index_0 = x_indices[0]
index_1 = x_indices[1]
# Set default axis names
n_dim = Xs[0].shape[1]
if X_names is None:
X_names = ['x' + str(xi+1) for xi in x_indices]
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
# set the colors
colors = colormap(np.linspace(0, 1, len(Xs)))
# make the plot
fig,ax = plt.subplots(figsize=(6, 6))
for Xi, ci, name_i in zip(Xs, colors, design_names):
ax.scatter(Xi[:, index_0], Xi[:, index_1], color = ci , s = 60, label = name_i, alpha = 0.6)
# Get axes limits
xlim_plot = list(ax.set_xlim(0, 1))
ylim_plot = list(ax.set_ylim(0, 1))
ax.legend(bbox_to_anchor=(1.04,1), loc="upper left")
ax.axis('square')
#ax.axis([0, 1, 0, 1])
ax.set_xlabel(X_names[index_0])
ax.set_ylabel(X_names[index_1])
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[index_0], n_tick_sections))
ax.set_yticks(set_axis_values(ylim_plot, n_tick_sections))
ax.set_yticklabels(set_axis_values(X_ranges[index_1], n_tick_sections))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, file_name + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def sampling_2d_exp(
Exp: Experiment,
x_indices: Optional[List[int]] = [0, 1],
design_names: Optional[Union[str, List[str]]] = None,
save_fig: Optional[bool] = False):
"""Plot sampling plan(s) in 2 dimensional space
X must be 2 dimensional
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
design_names : Optional[List[str]], optional
Names of the designs, by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Set the initial X set as the first design
X_init = tensor_to_np(Exp.X_init)
Xs = [X_init]
# Set the default design name
if design_names is None:
design_names = ['Initial']
# If there are infill points
if Exp.n_points > Exp.n_points_init:
X_infill = Exp.X[Exp.n_points_init:,:]
X_infill = tensor_to_np(X_infill)
Xs.append(X_infill)
design_names.append('Infill')
sampling_2d(Xs = Xs,
X_ranges=Exp.X_ranges,
x_indices=x_indices,
X_names=Exp.X_names,
design_names=design_names,
save_fig=save_fig,
save_path=Exp.exp_path)
def add_x_slice_2d(
ax: Axes,
xvalue: float,
yrange: List[float],
zrange: List[float],
mesh_size: Optional[int] = 100
) -> Axes:
"""Adds a 2-dimensional plane on x axis, parallel to y-z plane
in the 3-dimensional (x, y, z) space
Parameters
----------
ax : `matplotlib.axes.Axes.axis`_
Ax of the plot
xvalue : float
the value on the x axis at which the slice is made
yrange : list of float
[left bound, right bound] of y value
zrange : list of float
[left bound, right bound] of z value
mesh_size : Optional[int], optional
mesh size on the slice, by default 100
Returns
-------
ax : `matplotlib.axes.Axes.axis`_
Axes of the plots
.. _`matplotlib.axes.Axes.axis`: https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.axis.html
"""
colormap = cm.summer
Y, Z = np.meshgrid(np.linspace(yrange[0], yrange[1], mesh_size), np.linspace(zrange[0], zrange[1], mesh_size), indexing = 'ij')
X = xvalue * np.ones((mesh_size, mesh_size))
ax.plot_surface(X, Y, Z, cmap=colormap, rstride=1 , cstride=1, shade=False, alpha = 0.7)
return ax
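# Usage sketch for add_x_slice_2d(): add a y-z plane at x = 0.5 to a
# (hypothetical) 3d axes object:
#   fig = plt.figure()
#   ax = fig.add_subplot(111, projection='3d')
#   add_x_slice_2d(ax, xvalue=0.5, yrange=[0, 1], zrange=[0, 1])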
def add_y_slice_2d(
ax: Axes,
yvalue: float,
xrange: List[float],
zrange: List[float],
mesh_size: Optional[int] = 100
) -> Axes:
"""Adds a 2-dimensional plane on y axis, parallel to x-z plane
in the 3-dimensional (x, y, z) space
Parameters
----------
ax : `matplotlib.axes.Axes.axis`_
Ax of the plot
yvalue : float
the value on the y axis at which the slice is made
xrange : list of float
[left bound, right bound] of x value
zrange : list of float
[left bound, right bound] of z value
mesh_size : Optional[int], optional
mesh size on the slice, by default 100
Returns
-------
ax : `matplotlib.axes.Axes.axis`_
Axes of the plots
.. _`matplotlib.axes.Axes.axis`: https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.axis.html
"""
colormap = cm.summer
X, Z = np.meshgrid(np.linspace(xrange[0], xrange[1], mesh_size), np.linspace(zrange[0], zrange[1], mesh_size), indexing = 'ij')
Y = yvalue * np.ones((mesh_size, mesh_size))
ax.plot_surface(X, Y, Z, cmap=colormap, rstride=1 , cstride=1, shade=False, alpha = 0.7)
return ax
def add_z_slice_2d(
ax: Axes,
zvalue: float,
xrange: List[float],
yrange: List[float],
mesh_size: Optional[int] = 100
) -> Axes:
"""Adds a 2-dimensional plane on z axis, parallel to x-y plane
in the 3-dimensional (x, y, z) space
Parameters
----------
ax : `matplotlib.axes.Axes.axis`_
Ax of the plot
zvalue : float
the value on the z axis at which the slice is made
xrange : list of float
[left bound, right bound] of x value
yrange : list of float
[left bound, right bound] of y value
mesh_size : Optional[int], optional
mesh size on the slice, by default 100
Returns
-------
ax : `matplotlib.axes.Axes.axis`_
Axes of the plots
.. _`matplotlib.axes.Axes.axis`: https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.axis.html
"""
colormap = cm.summer
X, Y = np.meshgrid(np.linspace(xrange[0], xrange[1], mesh_size), np.linspace(yrange[0], yrange[1], mesh_size), indexing = 'ij')
Z = zvalue * np.ones((mesh_size, mesh_size))
ax.plot_surface(X, Y, Z, cmap=colormap, rstride=1 , cstride=1, shade=False, alpha = 0.7)
return ax  # return the axes, as the signature and docstring promise
def sampling_3d(
Xs: Union[MatrixLike2d, List[MatrixLike2d]],
X_ranges: Optional[MatrixLike2d] = None,
x_indices: Optional[List[int]] = [0, 1, 2],
X_names: Optional[List[str]] = None,
slice_axis: Optional[Union[str, int]] = None,
slice_value: Optional[float] = None,
slice_value_real: Optional[float] = None,
design_names: Optional[Union[str, List[str]]] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None):
"""Plot sampling plan(s) in 3 dimensional space
X must be 3 dimensional
Parameters
----------
Xs : Union[MatrixLike2d, List[MatrixLike2d]]
The set of sampling plans in a unit scale,
Can be a list of matrices or one matrix
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
x_indices : Optional[List[int]], optional
indices of three x variables, by default [0, 1, 2]
X_names: Optional[List[str]], optional
Names of X variables shown as x,y,z-labels
slice_axis : Optional[Union[str, int]], optional
axis where a 2d slice is made, by default None
slice_value : Optional[float], optional
value on the axis where a 2d slice is made,
in a unit scale, by default None
slice_value_real : Optional[float], optional
value on the axis where a 2d slice is made,
in a real scale, by default None
design_names : Optional[List[str]], optional
Names of the designs, by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
Raises
------
ValueError
if input axis is defined but the value is not given
ValueError
if input axis name is not x, y or z, or 0, 1, 2
"""
# if only one set of design is input, convert to list
if not isinstance(Xs, list):
Xs = [Xs]
# set default design names if none
if design_names is None:
design_names = ['design' + str(i) for i in range(len(Xs))]
if not isinstance(design_names, list):
design_names = [design_names]
# set the file name
# if only one set of design, use that design name
# else use comparison in the name
file_name = 'sampling_3d_'
if not isinstance(design_names, list):
file_name += design_names # for a single design, include its name
else:
file_name += 'comparison' # for multiple designs, use "comparison"
# Extract two variable indices for plotting
x_indices = sorted(x_indices)
index_0 = x_indices[0]
index_1 = x_indices[1]
index_2 = x_indices[2]
# Set default axis names
n_dim = Xs[0].shape[1]
if X_names is None:
X_names = ['x' + str(i+1) for i in range(n_dim)]
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
# set the colors
colors = colormap(np.linspace(0, 1, len(Xs)))
# Visualize sampling plan - a 3D scatter plot
fig = plt.figure(figsize = (8,8))
ax = fig.add_subplot(111, projection='3d')
for Xi, ci, name_i in zip(Xs, colors, design_names):
ax.scatter(Xi[:, index_0], Xi[:, index_1], Xi[:, index_2], \
color=ci, marker='o', s = 60, alpha = 0.6, label = name_i)
# Get axes limits
xlim_plot = list(ax.set_xlim(0, 1))
ylim_plot = list(ax.set_ylim(0, 1))
zlim_plot = list(ax.set_zlim(0, 1))
# Add a 2d slide if required
if slice_axis is not None:
if (slice_value is None) and (slice_value_real is None):
raise ValueError("Input a slice value")
if (slice_axis == 'x') or (slice_axis == 0):
if slice_value is None: # convert the slice value into a unit scale
slice_value = unitscale_xv(slice_value_real, X_ranges[0])
add_x_slice_2d(ax, slice_value, [0, 1], [0, 1])
file_name += '_slice_x'
elif (slice_axis == 'y') or (slice_axis == 1):
if slice_value is None:
slice_value = unitscale_xv(slice_value_real, X_ranges[1])
add_y_slice_2d(ax, slice_value, [0, 1], [0, 1])
file_name += '_slice_y'
elif (slice_axis == 'z') or (slice_axis == 2):
if slice_value is None:
slice_value = unitscale_xv(slice_value_real, X_ranges[2])
add_z_slice_2d(ax, slice_value, [0, 1], [0, 1])
file_name += '_slice_z'
else:
raise ValueError("Input slice_axis is not valid, must be x, y or z, or 0, 1, 2")
# set axis labels and ticks
ax.legend(bbox_to_anchor=(1.04,1), loc="upper left")
ax.set_xlabel(X_names[index_0], labelpad= 15)
ax.set_ylabel(X_names[index_1],labelpad= 15)
ax.set_zlabel(X_names[index_2],labelpad=3)
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[index_0], n_tick_sections))
ax.set_yticks(set_axis_values(ylim_plot, n_tick_sections))
ax.set_yticklabels(set_axis_values(X_ranges[index_1], n_tick_sections))
ax.set_zticks(set_axis_values(zlim_plot, n_tick_sections))
ax.set_zticklabels(set_axis_values(X_ranges[index_2], n_tick_sections))
ax.view_init(30, 45)
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, file_name + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def sampling_3d_exp(
Exp: Experiment,
x_indices: Optional[List[int]] = [0, 1, 2],
slice_axis: Optional[str] = None,
slice_value: Optional[float] = None,
slice_value_real: Optional[float] = None,
design_names: Optional[Union[str, List[str]]] = None,
save_fig: Optional[bool] = False):
"""Plot sampling plan(s) in 3 dimensional space
X must be 3 dimensional
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
x_indices : Optional[List[int]], optional
indices of three x variables, by default [0, 1, 2]
slice_axis : Optional[str], optional
axis where a 2d slice is made, by default None
slice_value : Optional[float], optional
value on the axis where a 2d slice is made,
by default None
slice_value_real : Optional[float], optional
value on the axis where a 2d slice is made,
in a real scale, by default None
design_names : Optional[List[str]], optional
Names of the designs, by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Set the initial X set as the first design
X_init = tensor_to_np(Exp.X_init)
Xs = [X_init]
# Set the default design name
if design_names is None:
design_names = ['Initial']
# If there are infill points
if Exp.n_points > Exp.n_points_init:
X_infill = Exp.X[Exp.n_points_init:,:]
X_infill = tensor_to_np(X_infill)
Xs.append(X_infill)
design_names.append('Infill')
sampling_3d(Xs=Xs,
X_ranges=Exp.X_ranges,
x_indices=x_indices,
X_names=Exp.X_names,
slice_axis=slice_axis,
slice_value=slice_value,
slice_value_real=slice_value_real,
design_names=design_names,
save_fig=save_fig,
save_path=Exp.exp_path)
#%% Functions for 2 dimensional systems on response heatmaps
def response_heatmap(
Y_real: MatrixLike2d,
Y_real_range: Optional[ArrayLike1d] = None,
Y_name: Optional[str] = None,
log_flag: Optional[bool] = False,
n_dim: Optional[int] = 2,
x_indices: Optional[List[int]] = [0, 1],
X_ranges: Optional[MatrixLike2d] = None,
X_names: Optional[List[str]] = None,
X_train: Optional[MatrixLike2d] = None,
X_new: Optional[MatrixLike2d] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Show a heat map for the response in a real scale
Parameters
----------
Y_real : MatrixLike2d
Response in a real scale
Y_real_range : ArrayLike1d
Ranges of the response, [lb, rb]
to show on the plot, by default None
Y_name : Optional[str], optional
Names of Y variable, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale, by default False
n_dim : Optional[int], optional
Dimension of X, i.e., the number of columns
by default 2
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
X_names: Optional[List[str]], optional
Names of X variables shown as x,y,z-labels
by default None
X_train : Optional[MatrixLike2d], optional
Data points used in training, by default None
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[str], optional
Iteration number to add to the figure name
by default ''
"""
'''
Takes in the response values Y and the sampling plan X
Makes a heat map and shows the locations of the sampling points
all inputs are numpy matrices
'''
# Preprocess Y_real
Y_real = tensor_to_np(Y_real)
# Set default Y_real_range
if Y_real_range is None:
Y_real_range = [np.min(Y_real), np.max(Y_real)]
if log_flag:
Y_real = np.log10(abs(Y_real))
# Extract two variable indices for plotting
x_indices = sorted(x_indices)
index_0 = x_indices[0]
index_1 = x_indices[1]
# Set default axis names
if X_names is None:
X_names = ['x' + str(xi + 1) for xi in range(n_dim)]
# Set Y_name in file name
if Y_name is None:
Y_name = 'y'
# set the file name
filename = 'heatmap_'+ Y_name + '_' + str(index_0) +\
str(index_1) + '_i_' + str(i_iter)
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
# Visualize response - a 2D heatmap
fig,ax = plt.subplots(figsize=(6, 6))
im = ax.imshow(Y_real, cmap = 'jet', interpolation = 'gaussian', \
vmin = Y_real_range[0], vmax = Y_real_range[1], origin = 'lower', \
extent = (0,1,0,1))
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.1)
plt.colorbar(im, cax = cax)
# Visualize the sampling points as hollow black scatter points
# Only for 2-dimensional system
if (n_dim == 2) and (X_train is not None) :
ax.scatter(X_train[:,index_0], X_train[:,index_1], s = 60, \
c = 'white', edgecolors= 'k', alpha = 0.6)
# Visualize the infill points as hollow red scatter points
# Only for 2-dimensional system
if (n_dim == 2) and (X_new is not None) :
ax.scatter(X_new[:,index_0], X_new[:,index_1], s = 60, \
c = 'white', edgecolors= 'r', alpha = 0.6)
# Obtain axes limits
xlim_plot = list(ax.set_xlim((0,1)))
ylim_plot = list(ax.set_ylim((0,1)))
# set axis labels and ticks
ax.set_xlabel(X_names[index_0])
ax.set_ylabel(X_names[index_1])
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[index_0], n_tick_sections))
ax.set_yticks(set_axis_values(ylim_plot, n_tick_sections))
ax.set_yticklabels(set_axis_values(X_ranges[index_1], n_tick_sections))
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, filename + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
def response_heatmap_exp(
Exp: Experiment,
X_new: Optional[MatrixLike2d] = None,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
show_samples: Optional[bool] = True,
save_fig: Optional[bool] = False):
"""Show a heat map for the response in a real scale
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
mesh_size : Optional[int], optional
mesh size, by default 41
show_samples: Optional[bool], optional
if true show the sample points
by default True
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 2D mesh test points
X_test, _, _ = create_full_X_test_2d(X_ranges=Exp.X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Make prediction using the GP model
Y_test = Exp.predict_real(X_test)
Y_test_2d = transform_Y_mesh_2d(Y_test, mesh_size=mesh_size)
# select the sample points
X_train = None
if show_samples: X_train = Exp.X
response_heatmap(Y_real=Y_test_2d,
Y_real_range = Y_real_range,
Y_name=Exp.Y_names[0],
log_flag=log_flag,
n_dim=Exp.n_dim,
x_indices=x_indices,
X_ranges=Exp.X_ranges,
X_names=Exp.X_names,
X_train=X_train,
X_new=X_new,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter=Exp.n_points - Exp.n_points_init)
def objective_heatmap_exp(
Exp: Experiment,
X_new: Optional[MatrixLike2d] = None,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
show_samples: Optional[bool] = True,
save_fig: Optional[bool] = False):
"""Show a heat map for objective function in a real scale
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
mesh_size : Optional[int], optional
mesh size, by default 41
show_samples: Optional[bool], optional
if true show the sample points
by default True
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 2D mesh test points
X_test, _, _ = create_full_X_test_2d(X_ranges=Exp.X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Calculate objective function value
Y_obj_test = eval_objective_func(X_test, Exp.X_ranges, Exp.objective_func)
Y_obj_test_2d = transform_Y_mesh_2d(Y_obj_test, mesh_size=mesh_size)
# select the sample points
X_train = None
if show_samples: X_train = Exp.X
response_heatmap(Y_real=Y_obj_test_2d,
Y_real_range = Y_real_range,
Y_name=Exp.Y_names[0],
log_flag= log_flag,
n_dim=Exp.n_dim,
x_indices=x_indices,
X_ranges=Exp.X_ranges,
X_names=Exp.X_names,
X_train=X_train,
X_new=X_new,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter='objective')
def objective_heatmap(
objective_func: object,
X_ranges: MatrixLike2d,
Y_name: Optional[str] = None,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
X_names: Optional[List[str]] = None,
X_train: Optional[MatrixLike2d] = None,
X_new: Optional[MatrixLike2d] = None,
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False,
name: Optional[str] = 'simple_experiment'
):
"""Show a 3-dimensional response surface
in a real scale
Using the experiment object
Parameters
----------
objective_func : function object
an objective function to optimize
X_ranges : MatrixLike2d,
list of x ranges
Y_name : Optional[str], optional
Name of Y variable, by default None
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
X_names: Optional[List[str]], optional
Names of X variables shown as x,y,z-labels
by default None
X_train : Optional[MatrixLike2d], optional
Data points used in training, by default None
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
mesh_size : Optional[int], optional
mesh size, by default 41
save_fig: Optional[bool], optional
if true save the plot
by default False
name : Optional[str], optional
Name of the objective function,
by default 'simple_experiment'
"""
n_dim = len(X_ranges)
# Create 2D mesh test points
X_test, _, _ = create_full_X_test_2d(X_ranges=X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Calculate objective function value
Y_obj_test = eval_objective_func(X_test, X_ranges, objective_func)
Y_obj_test_2d = transform_Y_mesh_2d(Y_obj_test, mesh_size=mesh_size)
# Set up the path to save graphical results
parent_dir = os.getcwd()
exp_path = os.path.join(parent_dir, name)
response_heatmap(Y_real=Y_obj_test_2d,
Y_real_range=Y_real_range,
Y_name=Y_name,
log_flag= log_flag,
n_dim=n_dim,
x_indices=x_indices,
X_ranges=X_ranges,
X_names=X_names,
X_train=X_train,
X_new=X_new,
save_fig=save_fig,
save_path=exp_path,
i_iter='objective')
def response_heatmap_err_exp(
Exp: Experiment,
X_new: Optional[MatrixLike2d] = None,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False):
"""Show a heat map for percentage error
(objective - response)/objective in a real scale
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
X_new : Optional[MatrixLike2d], optional
The next data point, i.e the infill points,
by default None
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
baseline : Optional[str], optional
the choice of baseline, must be left, right or center
mesh_size : Optional[int], optional
mesh size, by default 41
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 2D mesh test points
X_test, _, _ = create_full_X_test_2d(X_ranges=Exp.X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Make prediction using the GP model
Y_test = Exp.predict_real(X_test)
Y_test_2d = transform_Y_mesh_2d(Y_test, mesh_size=mesh_size)
# Calculate objective function value
Y_obj_test = eval_objective_func(X_test, Exp.X_ranges, Exp.objective_func)
Y_obj_test_2d = transform_Y_mesh_2d(Y_obj_test, mesh_size=mesh_size)
# Calculate the percentage errors
Y_err_2d = np.abs((Y_obj_test_2d - Y_test_2d)/Y_obj_test_2d)
response_heatmap(Y_real=Y_err_2d,
Y_real_range = Y_real_range,
Y_name = Exp.Y_names[0]+'_error',
log_flag= log_flag,
n_dim=Exp.n_dim,
x_indices=x_indices,
X_ranges=Exp.X_ranges,
X_names=Exp.X_names,
X_train=Exp.X,
X_new=X_new,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter=Exp.n_points - Exp.n_points_init)
#%% Functions for 2 dimensional systems on response surfaces
def response_surface(
X1_test: MatrixLike2d,
X2_test: MatrixLike2d,
Y_real: MatrixLike2d,
Y_real_lower: Optional[MatrixLike2d] = None,
Y_real_upper: Optional[MatrixLike2d] = None,
Y_real_range: Optional[ArrayLike1d] = None,
Y_name: Optional[str] = None,
n_dim: Optional[int] = 2,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
X_ranges: Optional[MatrixLike2d] = None,
X_names: Optional[List[str]] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot a response surface in 3-dimensional space
Parameters
----------
X1_test : MatrixLike2d
mesh values of the first x variable for plotting
X2_test : MatrixLike2d
mesh values of the second x variable for plotting
Y_real : MatrixLike2d
Response in a real scale
Y_real_lower : Optional[MatrixLike2d], optional
Model predicted lower bound in a real scale,
by default None
Y_real_upper : Optional[MatrixLike2d], optional
Model predicted upper bound in a real scale, by default None
Y_real_range : ArrayLike1d
Ranges of the response, [lb, rb]
Y_name : Optional[str], optional
Name of Y variable, by default None
n_dim : Optional[int], optional
Dimension of X, i.e., the number of columns
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
X_ranges : Optional[MatrixLike2d], optional
list of x ranges, by default None
X_names: Optional[List[str]], optional
Names of X variables shown as x,y,z-labels
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[str], optional
Iteration number to add to the figure name
by default '''
"""
# Preprocess Y_real
X1_test = tensor_to_np(X1_test)
X2_test = tensor_to_np(X2_test)
Y_real = tensor_to_np(Y_real)
if Y_real_lower is not None:
Y_real_lower = tensor_to_np(Y_real_lower)
if Y_real_upper is not None:
Y_real_upper = tensor_to_np(Y_real_upper)
# Set default Y_real_range
if Y_real_range is None:
Y_real_range = [np.min(Y_real), np.max(Y_real)]
if log_flag:
Y_real = np.log10(abs(Y_real))
if Y_real_lower is not None:
Y_real_lower = np.log10(abs(Y_real_lower))
if Y_real_upper is not None:
Y_real_upper = np.log10(abs(Y_real_upper))
# Extract two variable indices for plotting
x_indices = sorted(x_indices)
index_0 = x_indices[0]
index_1 = x_indices[1]
# Set default axis names
if X_names is None:
X_names = ['x' + str(xi + 1) for xi in range(n_dim)]
# Set Y_name in file name
if Y_name is None:
Y_name = 'y'
Y_name_plot = 'y'
else:
Y_name_plot = Y_name
# set the file name
filename = 'surface_'+ Y_name + '_' + str(index_0) +\
str(index_1) + '_i_' + str(i_iter)
# Set default [0,1] range for a unit scale
if X_ranges is None:
X_ranges = [[0,1]] * n_dim
# Set default number of sections
n_tick_sections = 5
    # Visualize the response as a 3D surface plot
fig = plt.figure(figsize = (10,10))
ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(X1_test, X2_test, Y_real, cmap = 'jet', \
vmin = Y_real_range[0], vmax = Y_real_range[1])
# Obtain axes limits
xlim_plot = list(ax.set_xlim(0, 1))
ylim_plot = list(ax.set_ylim(0, 1))
zlim_plot = list(ax.set_zlim(Y_real_range))
if Y_real_lower is not None:
ax.plot_surface(X1_test, X2_test, Y_real_lower, cmap = 'Blues', alpha = 0.7, \
vmin = Y_real_range[0], vmax = Y_real_range[1])
if Y_real_upper is not None:
ax.plot_surface(X1_test, X2_test, Y_real_upper, cmap = 'Reds', alpha = 0.7, \
vmin = Y_real_range[0], vmax = Y_real_range[1])
# set axis labels and ticks
ax.set_xlabel(X_names[index_0], labelpad=15)
ax.set_ylabel(X_names[index_1], labelpad=15)
ax.set_zlabel(Y_name_plot, labelpad=10)
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[index_0], n_tick_sections))
ax.set_yticks(set_axis_values(ylim_plot, n_tick_sections))
ax.set_yticklabels(set_axis_values(X_ranges[index_1], n_tick_sections))
ax.set_zticks(set_axis_values(zlim_plot, n_tick_sections, 1))
ax.set_zticklabels(set_axis_values(zlim_plot, n_tick_sections, 1))
ax.view_init(30, 45)
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, filename + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
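# A minimal, hypothetical usage sketch for response_surface: a toy response on
# a unit-scale 41 x 41 mesh, with illustrative +/- 0.1 bands standing in for
# model confidence bounds. The names ('T', 'P', 'yield') are made up here.
def _demo_response_surface():
    X1_demo, X2_demo = np.meshgrid(np.linspace(0, 1, 41), np.linspace(0, 1, 41))
    Y_demo = np.sin(3 * X1_demo) * np.cos(2 * X2_demo)
    response_surface(X1_test=X1_demo,
                     X2_test=X2_demo,
                     Y_real=Y_demo,
                     Y_real_lower=Y_demo - 0.1,
                     Y_real_upper=Y_demo + 0.1,
                     Y_name='yield',
                     X_names=['T', 'P'])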
def response_scatter_exp(
Exp: Experiment,
Y_real_range: Optional[ArrayLike1d] = None,
Y_name: Optional[str] = None,
n_dim: Optional[int] = 3,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1, 2],
X_ranges: Optional[MatrixLike2d] = None,
X_names: Optional[List[str]] = None,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot a response surface in 3-dimensional space
Parameters
----------
Y_real_range : ArrayLike1d
Ranges of the response, [lb, rb]
Y_name : Optional[str], optional
Name of Y variable, by default None
    n_dim : Optional[int], optional
        Dimensionality of X, i.e., number of columns, by default 3
log_flag : Optional[bool], optional
flag to plot in a log scale
    x_indices : Optional[List[int]], optional
        indices of three x variables, by default [0, 1, 2]
X_ranges : MatrixLike2d, optional
list of x ranges, by default None
    X_names : Optional[List[str]], optional
        Names of X variables shown as x,y,z-labels
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
    i_iter : Optional[Union[str, int]], optional
        Iteration number to add to the figure name,
        by default ''
"""
Y_real = Exp.Y_real
# Set default Y_real_range
if Y_real_range is None:
Y_real_range = [np.min(Y_real), np.max(Y_real)]
if log_flag:
Y_real = np.log10(abs(Y_real))
# Extract two variable indices for plotting
x_indices = sorted(x_indices)
index_0 = x_indices[0]
index_1 = x_indices[1]
index_2 = x_indices[2]
# Set default axis names
if X_names is None:
X_names = ['x' + str(xi + 1) for xi in range(n_dim)]
# Set Y_name in file name
if Y_name is None:
Y_name = 'y'
Y_name_plot = 'y'
else:
Y_name_plot = Y_name
# set the file name
filename = 'scatter_'+ Y_name + '_' + str(index_0) +\
str(index_1) + str(index_2) + '_i_' + str(i_iter)
# Set default X_ranges
if X_ranges is None:
X_ranges = [[np.min(Exp.X_real[:, index_0]), np.max(Exp.X_real[:, index_0])],
[np.min(Exp.X_real[:, index_1]), np.max(Exp.X_real[:, index_1])],
[np.min(Exp.X_real[:, index_2]), np.max(Exp.X_real[:, index_2])]]
# Set default number of sections
n_tick_sections = 5
    # Visualize the response as a 3D scatter plot
fig = plt.figure(figsize = (10,10))
ax = fig.add_subplot(111, projection='3d')
im = ax.scatter(Exp.X_real[:, index_0], Exp.X_real[:, index_1], Exp.X_real[:, index_2],
vmin=Y_real_range[0], vmax=Y_real_range[1], linewidths=1, alpha=0.7,
edgecolor='k', s=60, c=Y_real)
# Obtain axes limits
xlim_plot = list(ax.set_xlim(X_ranges[index_0]))
ylim_plot = list(ax.set_ylim(X_ranges[index_1]))
zlim_plot = list(ax.set_zlim(X_ranges[index_2]))
# set axis labels and ticks
ax.set_xlabel(X_names[index_0], labelpad=15)
ax.set_ylabel(X_names[index_1], labelpad=15)
ax.set_zlabel(X_names[index_2], labelpad=15)
ax.set_xticks(set_axis_values(xlim_plot, n_tick_sections))
ax.set_xticklabels(set_axis_values(X_ranges[index_0], n_tick_sections))
ax.set_yticks(set_axis_values(ylim_plot, n_tick_sections))
ax.set_yticklabels(set_axis_values(X_ranges[index_1], n_tick_sections))
    ax.set_zticks(set_axis_values(zlim_plot, n_tick_sections))
ax.set_zticklabels(set_axis_values(X_ranges[index_2], n_tick_sections))
# set colorbar for response
    cbar = fig.colorbar(im, ax=ax)
    cbar.set_label(label=Y_name, rotation=270, labelpad=20)
ax.view_init(30, 45)
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, filename + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
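# A hypothetical call sketch for response_scatter_exp; `exp` is assumed to be
# a trained 3-dimensional Experiment object (not constructed here).
def _demo_response_scatter_exp(exp):
    response_scatter_exp(exp,
                         x_indices=[0, 1, 2],
                         log_flag=False,
                         save_fig=False)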
def response_surface_exp(
Exp: Experiment,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
show_confidence: Optional[bool] = False,
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False):
"""Show a 3-dimensional response surface
in a real scale
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
    baseline : Optional[str], optional
        the choice of baseline, must be left, right or center,
        by default 'left'
    show_confidence : Optional[bool], optional
        if true show the confidence interval
        surfaces, by default False
mesh_size : Optional[int], optional
mesh size, by default 41
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 2D mesh test points
X_test, X1_test, X2_test = create_full_X_test_2d(X_ranges=Exp.X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Make predictions using the GP model
if show_confidence:
Y_test, Y_test_lower, Y_test_upper = Exp.predict_real(X_test, show_confidence = True)
Y_test_2D = transform_Y_mesh_2d(Y_test, mesh_size)
Y_test_lower_2D = transform_Y_mesh_2d(Y_test_lower, mesh_size)
Y_test_upper_2D = transform_Y_mesh_2d(Y_test_upper, mesh_size)
else:
Y_test = Exp.predict_real(X_test)
Y_test_2D = transform_Y_mesh_2d(Y_test, mesh_size)
Y_test_lower_2D, Y_test_upper_2D = None, None
response_surface(X1_test=X1_test,
X2_test=X2_test,
Y_real=Y_test_2D,
Y_real_lower=Y_test_lower_2D,
Y_real_upper=Y_test_upper_2D,
Y_real_range=Y_real_range,
Y_name=Exp.Y_names[0],
n_dim=Exp.n_dim,
log_flag=log_flag,
x_indices=x_indices,
X_ranges=Exp.X_ranges,
X_names=Exp.X_names,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter=Exp.n_points - Exp.n_points_init)
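# A hypothetical call sketch for response_surface_exp with confidence bounds
# enabled; `exp` is assumed to be a trained 2-dimensional Experiment object.
def _demo_response_surface_exp(exp):
    response_surface_exp(exp,
                         x_indices=[0, 1],
                         show_confidence=True,
                         mesh_size=41,
                         save_fig=False)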
def objective_surface_exp(
Exp: Experiment,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False):
"""Show a 3-dimensional response surface
in a real scale
Using the experiment object
Parameters
----------
Exp : Experiment
Experiment object
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
    baseline : Optional[str], optional
        the choice of baseline, must be left, right or center,
        by default 'left'
mesh_size : Optional[int], optional
mesh size, by default 41
save_fig: Optional[bool], optional
if true save the plot
by default False
"""
# Create 2D mesh test points
X_test, X1_test, X2_test = create_full_X_test_2d(X_ranges=Exp.X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Calculate objective function value
Y_obj_test = eval_objective_func(X_test, Exp.X_ranges, Exp.objective_func)
Y_obj_test_2D = transform_Y_mesh_2d(Y_obj_test, mesh_size)
Y_obj_lower_2D, Y_obj_upper_2D = None, None
response_surface(X1_test=X1_test,
X2_test=X2_test,
Y_real=Y_obj_test_2D,
Y_real_lower=Y_obj_lower_2D,
Y_real_upper=Y_obj_upper_2D,
Y_real_range=Y_real_range,
Y_name=Exp.Y_names[0],
n_dim=Exp.n_dim,
log_flag=log_flag,
x_indices=x_indices,
X_ranges=Exp.X_ranges,
                     X_names=Exp.X_names,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter='objective')
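# A hypothetical call sketch for objective_surface_exp on an experiment with
# more than two inputs; dimensions not in x_indices are assumed to be pinned
# at the 'center' baseline of their ranges by create_full_X_test_2d.
def _demo_objective_surface_exp(exp):
    objective_surface_exp(exp,
                          x_indices=[0, 1],
                          baseline='center',
                          mesh_size=41)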
def objective_surface(
objective_func: object,
X_ranges: MatrixLike2d,
Y_name: Optional[str] = None,
Y_real_range: Optional[ArrayLike1d] = None,
log_flag: Optional[bool] = False,
x_indices: Optional[List[int]] = [0, 1],
fixed_values: Optional[Union[ArrayLike1d, float]] = [],
fixed_values_real: Optional[Union[ArrayLike1d, float]] = [],
baseline: Optional[str] = 'left',
X_names: Optional[List[str]] = None,
mesh_size: Optional[int] = 41,
save_fig: Optional[bool] = False,
name: Optional[str] = 'simple_experiment'
):
"""Show a 3-dimensional response surface
in a real scale
Using the experiment object
Parameters
----------
    objective_func : function object
        an objective function to optimize
X_ranges : MatrixLike2d,
list of x ranges
Y_name : Optional[str], optional
Name of Y variable, by default None
Y_real_range : Optional[ArrayLike1d], optional
Ranges of the response, [lb, rb]
to show on the plot, by default None
log_flag : Optional[bool], optional
flag to plot in a log scale
x_indices : Optional[List[int]], optional
indices of two x variables, by default [0, 1]
fixed_values : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a unit scale, by default []
fixed_values_real : Optional[Union[ArrayLike1d, float]], optional
fixed values in other dimensions,
in a real scale, by default []
    baseline : Optional[str], optional
        the choice of baseline, must be left, right or center,
        by default 'left'
    X_names : Optional[List[str]], optional
        Names of X variables shown as x,y,z-labels,
        by default None
mesh_size : Optional[int], optional
mesh size, by default 41
save_fig: Optional[bool], optional
if true save the plot
by default False
name : Optional[str], optional
Name of the objective function,
by default 'simple_experiment'
"""
n_dim = len(X_ranges)
# Create 2D mesh test points
X_test, X1_test, X2_test = create_full_X_test_2d(X_ranges=X_ranges,
x_indices=x_indices,
fixed_values=fixed_values,
fixed_values_real=fixed_values_real,
baseline=baseline,
mesh_size=mesh_size)
# Calculate objective function value
Y_obj_test = eval_objective_func(X_test, X_ranges, objective_func)
Y_obj_test_2D = transform_Y_mesh_2d(Y_obj_test, mesh_size)
Y_obj_lower_2D, Y_obj_upper_2D = None, None
# Set up the path to save graphical results
parent_dir = os.getcwd()
exp_path = os.path.join(parent_dir, name)
response_surface(X1_test=X1_test,
X2_test=X2_test,
Y_real=Y_obj_test_2D,
Y_real_lower=Y_obj_lower_2D,
Y_real_upper=Y_obj_upper_2D,
Y_real_range=Y_real_range,
Y_name=Y_name,
n_dim=n_dim,
log_flag=log_flag,
x_indices=x_indices,
X_ranges= X_ranges,
X_names= X_names,
save_fig=save_fig,
save_path=exp_path,
i_iter='objective')
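# A self-contained sketch for objective_surface with a toy two-input objective
# defined inline. It assumes, per eval_objective_func's usage above, that the
# objective accepts a 2-D array of points and returns one value per row; the
# ranges and names are illustrative only.
def _demo_objective_surface():
    def toy_objective(X):
        # a simple inverted quadratic bowl centered at (50, 1)
        X = np.atleast_2d(X)
        return -((X[:, 0] - 50.0) ** 2 / 2500.0 + (X[:, 1] - 1.0) ** 2)
    objective_surface(objective_func=toy_objective,
                      X_ranges=[[0, 100], [0, 2]],
                      Y_name='neg_loss',
                      X_names=['T', 'P'])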
#%% Functions for Pareto front visualization
def pareto_front(
y1: MatrixLike2d,
y2: MatrixLike2d,
Y_names: Optional[List[str]] = None,
fill: Optional[bool] = True,
diagonal: Optional[bool] = True,
save_fig: Optional[bool] = False,
save_path: Optional[str] = None,
i_iter: Optional[Union[str, int]] = ''):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
Parameters
----------
y1 : MatrixLike2d
Ground truth values
y2 : MatrixLike2d
Model predicted values
fill: Optional[bool], optional
if true fill the space enclosed by the points
by default True
diagonal: Optional[bool], optional
if true plot the y = x line
by default True
save_fig: Optional[bool], optional
if true save the plot
by default False
save_path: Optional[str], optional
Path where the figure is being saved
by default the current directory
i_iter: Optional[Union[str, int]], optional
Iteration number to add to the figure name
by default ''
"""
y1 = np.squeeze(tensor_to_np(y1))
y2 = np.squeeze(tensor_to_np(y2))
# Set default axis names
if Y_names is None:
Y_names = ['y1', 'y2']
fig, ax = plt.subplots(figsize=(6,6))
ax.scatter(y1, y2, s=60, alpha = 0.5)
if fill:
ax.fill_between(y1, y2, color = 'steelblue', alpha=0.3)
lims = [
np.min([y1.min(), y2.min()]), # min of both axes
np.max([y1.max(), y2.max()]), # max of both axes
]
# number of sections in the axis
nsections = 5
    # now plot both limits against each other
if diagonal:
ax.plot(lims, lims, 'k--', alpha=0.75, zorder=0)
ax.set_xlim([y1.min(), y1.max()]) #ax.set_xlim(lims)
ax.set_ylim([y2.min(), y2.max()]) #ax.set_ylim(lims)
ax.set_xticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticks(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_xticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_yticklabels(np.around(np.linspace(lims[0], lims[1], nsections), 2))
ax.set_xlabel(Y_names[0])
ax.set_ylabel(Y_names[1])
plt.show()
# save the figure as png
if save_fig:
if save_path is None:
save_path = os.getcwd()
if not os.path.exists(save_path): os.makedirs(save_path)
fig.savefig(os.path.join(save_path, 'pareto_'+ str(i_iter) + '.' + figformat),
bbox_inches="tight", transparent=backgroundtransparency)
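# A runnable sketch for pareto_front with synthetic trade-off data: two
# competing objectives sampled along a 1/t curve, purely for illustration.
def _demo_pareto_front():
    t = np.linspace(0.05, 1.0, 20)
    pareto_front(y1=t,          # e.g., conversion
                 y2=1.0 / t,    # e.g., cost, trading off against y1
                 Y_names=['conversion', 'cost'],
                 fill=True,
                 diagonal=False)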
def pareto_front_exp(
Exp: Union[WeightedMOOExperiment, EHVIMOOExperiment],
fill: Optional[bool] = True,
diagonal: Optional[bool] = True,
save_fig: Optional[bool] = False,
design_name: Optional[Union[str, int]] = 'final'):
"""Plot parity plot comparing the ground true
objective function values against predicted model mean
Using MOOExperiment object
Parameters
---------
Exp: Union[WeightedMOOExperiment, EHVIMOOExperiment]
MOOExperiment object
fill: Optional[bool], optional
if true fill the space enclosed by the points
by default True
diagonal: Optional[bool], optional
if true plot the y = x line
by default True
save_fig: Optional[bool], optional
if true save the plot
by default False
design_name : Optional[Union[str, int]], optional
Design name to add to the figure name
by default 'final'
"""
Y_real_opts = Exp.Y_real_opts
pareto_front(y1=Y_real_opts[:, 0],
y2=Y_real_opts[:, 1],
Y_names=Exp.Y_names,
fill=fill,
diagonal=diagonal,
save_fig=save_fig,
save_path=Exp.exp_path,
i_iter = design_name)
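# A hypothetical call sketch for pareto_front_exp; `moo_exp` is assumed to be
# a trained EHVIMOOExperiment or WeightedMOOExperiment whose Y_real_opts holds
# the optimal response pairs.
def _demo_pareto_front_exp(moo_exp):
    pareto_front_exp(moo_exp,
                     fill=True,
                     diagonal=False,
                     design_name='final')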
| 37.428448
| 131
| 0.617474
| 12,050
| 86,572
| 4.229378
| 0.041909
| 0.030904
| 0.012165
| 0.019013
| 0.868986
| 0.84697
| 0.828369
| 0.81067
| 0.797975
| 0.787615
| 0
| 0.01647
| 0.289551
| 86,572
| 2,312
| 132
| 37.444637
| 0.812145
| 0.380181
| 0
| 0.73242
| 0
| 0
| 0.020078
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027397
| false
| 0
| 0.014612
| 0
| 0.044749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92d39ecfbbbf31a27de246c2dc6206fd5a3ca59e
| 4,490
|
py
|
Python
|
rl_with_videos/preprocessors/convnet.py
|
RaiAnant/rl_with_videos
|
6f9cbb378da79bd2d4388b4c3682b92bccae95d3
|
[
"MIT"
] | null | null | null |
rl_with_videos/preprocessors/convnet.py
|
RaiAnant/rl_with_videos
|
6f9cbb378da79bd2d4388b4c3682b92bccae95d3
|
[
"MIT"
] | null | null | null |
rl_with_videos/preprocessors/convnet.py
|
RaiAnant/rl_with_videos
|
6f9cbb378da79bd2d4388b4c3682b92bccae95d3
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from rl_with_videos.models.feedforward import feedforward_model
from rl_with_videos.utils.keras import PicklableKerasModel
def conv_layers(images, conv_filters, conv_kernel_sizes, pool_sizes, pool_strides, pool_type, *args, **kwargs):
conv_out = images
for filters, kernel_size, pool_size, strides in zip(
conv_filters, conv_kernel_sizes, pool_sizes, pool_strides):
print("kwargs:", kwargs)
conv_out = tf.keras.layers.Conv2D(
filters=filters,
kernel_size=kernel_size,
padding="SAME",
activation=tf.nn.relu,
*args,
**kwargs
)(conv_out)
conv_out = getattr(tf.keras.layers, pool_type)(
pool_size=pool_size, strides=strides
)(conv_out)
return conv_out
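# A small sketch exercising conv_layers on a symbolic image batch; the filter,
# kernel, and pooling settings mirror the defaults of convnet_preprocessor
# below, and the 32x32x3 input shape is illustrative.
def _demo_conv_layers():
    images_in = tf.keras.layers.Input(shape=(32, 32, 3))
    conv_out = conv_layers(images_in,
                           conv_filters=(32, 32),
                           conv_kernel_sizes=((5, 5), (5, 5)),
                           pool_sizes=((2, 2), (2, 2)),
                           pool_strides=(2, 2),
                           pool_type='MaxPool2D')
    return tf.keras.Model(images_in, conv_out)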
def convnet_preprocessor(
input_shapes,
image_shape,
output_size,
conv_filters=(32, 32),
conv_kernel_sizes=((5, 5), (5, 5)),
pool_type='MaxPool2D',
pool_sizes=((2, 2), (2, 2)),
pool_strides=(2, 2),
dense_hidden_layer_sizes=(64, 64),
data_format='channels_last',
name="convnet_preprocessor",
make_picklable=True,
*args,
**kwargs):
    if data_format == 'channels_last':
        H, W, C = image_shape
    elif data_format == 'channels_first':
        C, H, W = image_shape
    else:
        raise ValueError("Unknown data_format: {}".format(data_format))
inputs = [
tf.keras.layers.Input(shape=input_shape)
for input_shape in input_shapes
]
concatenated_input = tf.keras.layers.Lambda(
lambda x: tf.concat(x, axis=-1)
)(inputs)
images_flat, input_raw = tf.keras.layers.Lambda(
lambda x: [x[..., :H * W * C], x[..., H * W * C:]]
)(concatenated_input)
images = tf.keras.layers.Reshape(image_shape)(images_flat)
conv_out = conv_layers(images, conv_filters, conv_kernel_sizes, pool_sizes, pool_strides, pool_type, *args, **kwargs)
flattened = tf.keras.layers.Flatten()(conv_out)
concatenated_output = tf.keras.layers.Lambda(
lambda x: tf.concat(x, axis=-1)
)([flattened, input_raw])
output = (
feedforward_model(
input_shapes=(concatenated_output.shape[1:].as_list(), ),
output_size=output_size,
hidden_layer_sizes=dense_hidden_layer_sizes,
activation='relu',
output_activation='linear',
*args,
**kwargs
)([concatenated_output])
if dense_hidden_layer_sizes
else concatenated_output)
model = PicklableKerasModel(inputs, output, name=name)
return model
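# A hypothetical call sketch for convnet_preprocessor. Each input is a flat
# vector whose leading H*W*C entries are the flattened image and whose
# remaining entries are raw (non-image) observations, matching the Lambda
# split above. The 32x32x3 image and 7 raw dimensions are illustrative.
def _demo_convnet_preprocessor():
    H, W, C, raw_dim = 32, 32, 3, 7
    return convnet_preprocessor(input_shapes=((H * W * C + raw_dim,),),
                                image_shape=(H, W, C),
                                output_size=64)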
def convnet_preprocessor_softmax(
input_shapes,
image_shape,
output_size,
conv_filters=(32, 32),
conv_kernel_sizes=((5, 5), (5, 5)),
pool_type='MaxPool2D',
pool_sizes=((2, 2), (2, 2)),
pool_strides=(2, 2),
dense_hidden_layer_sizes=(64, 64),
data_format='channels_last',
name="convnet_preprocessor_softmax",
make_picklable=True,
*args,
**kwargs):
"""
Just use it to test output
"""
if data_format == 'channels_last':
H, W, C = image_shape
elif data_format == 'channels_first':
C, H, W = image_shape
inputs = [
tf.keras.layers.Input(shape=input_shape)
for input_shape in input_shapes
]
concatenated_input = tf.keras.layers.Lambda(
lambda x: tf.concat(x, axis=-1)
)(inputs)
images_flat, input_raw = tf.keras.layers.Lambda(
lambda x: [x[..., :H * W * C], x[..., H * W * C:]]
)(concatenated_input)
images = tf.keras.layers.Reshape(image_shape)(images_flat)
conv_out = conv_layers(images, conv_filters, conv_kernel_sizes, pool_sizes, pool_strides, pool_type, *args, **kwargs)
flattened = tf.keras.layers.Flatten()(conv_out)
concatenated_output = tf.keras.layers.Lambda(
lambda x: tf.concat(x, axis=-1)
)([flattened, input_raw])
output = (
feedforward_model(
input_shapes=(concatenated_output.shape[1:].as_list(), ),
output_size=output_size,
hidden_layer_sizes=dense_hidden_layer_sizes,
activation='relu',
output_activation='softmax',
*args,
**kwargs
)([concatenated_output])
if dense_hidden_layer_sizes
else concatenated_output)
model = PicklableKerasModel(inputs, output, name=name)
return model
| 30.753425
| 121
| 0.615145
| 543
| 4,490
| 4.80663
| 0.165746
| 0.037548
| 0.069732
| 0.048276
| 0.815326
| 0.794636
| 0.794636
| 0.794636
| 0.794636
| 0.777011
| 0
| 0.013711
| 0.269042
| 4,490
| 145
| 122
| 30.965517
| 0.781536
| 0.005791
| 0
| 0.79661
| 0
| 0
| 0.040018
| 0.006295
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025424
| false
| 0
| 0.025424
| 0
| 0.076271
| 0.008475
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92da5498f3ef4b946555bb88c59571c68016e8e5
| 1,469
|
py
|
Python
|
venv/lib/python2.7/site-packages/pychart/afm/Courier.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
venv/lib/python2.7/site-packages/pychart/afm/Courier.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
venv/lib/python2.7/site-packages/pychart/afm/Courier.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
# AFM font Courier (path: /usr/share/fonts/afms/adobe/pcrr8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostscript source code.
import dir
dir.afm["Courier"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 500, 600, 600, 600, 600, 500, 600, 600, 600, 600, 600, 600, 600, 600, 500, 600, 500, 600, 600, 600, 600, 600, 600, 600, 600, 500, 600, 600, 500, 600, 600, 600, 600, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 600, 500, 600, 500, 500, 500, 500, 600, 600, 600, 600, 500, 500, 500, 500, 500, 600, 500, 500, 500, 600, 500, 500, 600, 600, 600, 600, )
| 244.833333
| 1,283
| 0.619469
| 285
| 1,469
| 3.192982
| 0.105263
| 0.89011
| 1.246154
| 1.556044
| 0.830769
| 0.827473
| 0.797802
| 0.787912
| 0.778022
| 0.774725
| 0
| 0.6356
| 0.189244
| 1,469
| 5
| 1,284
| 293.8
| 0.128463
| 0.113683
| 0
| 0
| 0
| 0
| 0.005393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 16
|
1390dcf2de83392ae8ff7ff4c5925775773c0137
| 5,765
|
py
|
Python
|
students/migrations/0002_auto_20170225_1023.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | 1
|
2020-03-02T20:55:04.000Z
|
2020-03-02T20:55:04.000Z
|
students/migrations/0002_auto_20170225_1023.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | 6
|
2020-06-05T17:18:41.000Z
|
2022-03-11T23:14:47.000Z
|
students/migrations/0002_auto_20170225_1023.py
|
Stanislav-Rybonka/studentsdb
|
efb1440db4ec640868342a5f74cd48784268781f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-02-25 10:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('students', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='journal',
options={},
),
migrations.AddField(
model_name='journal',
name='present_day_1',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_10',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_11',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_12',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_13',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_14',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_15',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_16',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_17',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_18',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_19',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_2',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_20',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_21',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_22',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_23',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_24',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_25',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_26',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_27',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_28',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_29',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_3',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_30',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_4',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_5',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_6',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_7',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_8',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='journal',
name='present_day_9',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='journal',
name='date',
field=models.DateField(verbose_name='Date'),
),
migrations.AlterField(
model_name='journal',
name='student',
field=models.ForeignKey(max_length=256, on_delete=django.db.models.deletion.CASCADE, related_name='students', to='students.Student', unique_for_month='date', verbose_name='Student name'),
),
]
| 31.850829
| 199
| 0.550737
| 512
| 5,765
| 5.996094
| 0.160156
| 0.118241
| 0.166775
| 0.208469
| 0.837134
| 0.837134
| 0.799674
| 0.799674
| 0.784039
| 0.784039
| 0
| 0.019603
| 0.33634
| 5,765
| 180
| 200
| 32.027778
| 0.782802
| 0.011795
| 0
| 0.734104
| 1
| 0
| 0.125922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017341
| 0
| 0.034682
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b94f4f1646e795434c368d635a1a0be4d7a72ed5
| 1,968
|
py
|
Python
|
tests/unit/test_eventalign_unit.py
|
a-sneddon/eventparser
|
d30bcf2833816d6e0a7567486d789943dc471112
|
[
"MIT"
] | null | null | null |
tests/unit/test_eventalign_unit.py
|
a-sneddon/eventparser
|
d30bcf2833816d6e0a7567486d789943dc471112
|
[
"MIT"
] | null | null | null |
tests/unit/test_eventalign_unit.py
|
a-sneddon/eventparser
|
d30bcf2833816d6e0a7567486d789943dc471112
|
[
"MIT"
] | null | null | null |
import pytest
from eventparser.eventalign import Line
def test_is_valid_with_valid_data():
line = Line("ENST0", 123, "read_A", "GCACT", "GCACT", 1, 3)
assert line.is_valid() == True
def test_is_valid_with_zero_position():
line = Line("ENST0", 0, "read_A", "GCACT", "GCACT", 1, 3)
assert line.is_valid() == True
def test_is_valid_with_invalid_position():
line = Line("ENST0", -1, "read_A", "GCACT", "GCACT", 1, 3)
assert line.is_valid() == False
def test_is_valid_with_invalid_ref_kmer():
line = Line("ENST0", 123, "read_A", "ACNTC", "GCACT", 1, 3)
assert line.is_valid() == False
def test_is_valid_with_invalid_model_kmer():
line = Line("ENST0", 123, "read_A", "GCACT", "GCACTYY", 1, 3)
assert line.is_valid() == False
def test_is_valid_with_mismatching_ref_model_kmers():
line = Line("ENST0", 123, "read_A", "GCACT", "GCACC", 1, 3)
assert line.is_valid() == False
def test_is_valid_with_reverse_complement_model_kmer():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", 1, 3)
assert line.is_valid() == True
def test_is_valid_with_end_after_start_index():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", 10, 11)
assert line.is_valid() == True
def test_is_valid_with_end_before_start_index():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", 4, 3)
assert line.is_valid() == False
def test_is_valid_with_end_and_start_index_same():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", 4, 4)
assert line.is_valid() == False
def test_is_valid_with_start_index_negative():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", -1, 4)
assert line.is_valid() == False
def test_is_valid_with_start_index_zero():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", 0, 4)
assert line.is_valid() == True
def test_is_valid_with_end_index_negative():
line = Line("ENST0", 123, "read_A", "GCACT", "AGTGC", -10, -9)
assert line.is_valid() == False
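# A compact alternative consolidating a few of the cases above with
# pytest.mark.parametrize; equivalent assertions in one test function.
@pytest.mark.parametrize("position, ref_kmer, model_kmer, start, end, expected", [
    (123, "GCACT", "GCACT", 1, 3, True),    # valid data
    (-1, "GCACT", "GCACT", 1, 3, False),    # invalid position
    (123, "GCACT", "AGTGC", 4, 3, False),   # end before start index
])
def test_is_valid_parametrized(position, ref_kmer, model_kmer, start, end, expected):
    line = Line("ENST0", position, "read_A", ref_kmer, model_kmer, start, end)
    assert line.is_valid() == expected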
| 35.781818
| 66
| 0.670732
| 309
| 1,968
| 3.925566
| 0.158576
| 0.150041
| 0.096455
| 0.150041
| 0.834295
| 0.800495
| 0.792251
| 0.737016
| 0.711459
| 0.618302
| 0
| 0.046667
| 0.161585
| 1,968
| 54
| 67
| 36.444444
| 0.688485
| 0
| 0
| 0.317073
| 0
| 0
| 0.139736
| 0
| 0
| 0
| 0
| 0
| 0.317073
| 1
| 0.317073
| false
| 0
| 0.04878
| 0
| 0.365854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b95c0cd67914f5ff97a172b8596cce7346f60940
| 21,801
|
py
|
Python
|
caffe2/python/operator_test/video_input_op_test.py
|
freedomtan/caffe2
|
e1f614a5f8ae92f4ecb828e1d5f84d2cd1fe12bd
|
[
"Apache-2.0"
] | 585
|
2015-08-10T02:48:52.000Z
|
2021-12-01T08:46:59.000Z
|
caffe2/python/operator_test/video_input_op_test.py
|
PDFxy/caffe2
|
28523ff1ff33f18eaf8b04cc4e0f308826e1861a
|
[
"Apache-2.0"
] | 23
|
2015-08-30T11:54:51.000Z
|
2017-03-06T03:01:07.000Z
|
caffe2/python/operator_test/video_input_op_test.py
|
PDFxy/caffe2
|
28523ff1ff33f18eaf8b04cc4e0f308826e1861a
|
[
"Apache-2.0"
] | 183
|
2015-08-10T02:49:04.000Z
|
2021-12-01T08:47:13.000Z
|
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
try:
import lmdb
except ImportError:
raise unittest.SkipTest('python-lmdb is not installed')
import sys
import os
import shutil
import tempfile
from caffe2.proto import caffe2_pb2
from caffe2.python import workspace, model_helper
import numpy as np
class VideoInputOpTest(unittest.TestCase):
def create_a_list(self, output_file, line, n):
# create a list that repeat a line n times
# used for creating a list file for simple test input
with open(output_file, 'w') as file:
for _i in range(n):
file.write(line)
def create_video_db(self, list_file, output_file, use_list=False):
# Write to lmdb database...
        LMDB_MAP_SIZE = 1 << 40  # maximum map size (1 TiB); adjust as needed
env = lmdb.open(output_file, map_size=LMDB_MAP_SIZE)
total_size = 0
file_name = []
start_frame = []
label = []
index = 0
with env.begin(write=True) as txn:
with open(list_file, 'r') as data:
for line in data:
p = line.split()
file_name = p[0]
start_frame = int(p[1])
label = int(p[2])
if not use_list:
with open(file_name, mode='rb') as file:
video_data = file.read()
else:
video_data = file_name
tensor_protos = caffe2_pb2.TensorProtos()
video_tensor = tensor_protos.protos.add()
video_tensor.data_type = 4 # string data
video_tensor.string_data.append(video_data)
label_tensor = tensor_protos.protos.add()
label_tensor.data_type = 2
label_tensor.int32_data.append(label)
start_frame_tensor = tensor_protos.protos.add()
start_frame_tensor.data_type = 2
start_frame_tensor.int32_data.append(start_frame)
txn.put(
'{}'.format(index).encode('ascii'),
tensor_protos.SerializeToString()
)
index = index + 1
total_size = total_size + len(video_data) + sys.getsizeof(int)
return total_size
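    # For reference, each line of the list file consumed by create_a_list /
    # create_video_db is a "<video_path> <start_frame> <label>" triple, e.g.
    # (hypothetical): "/path/to/sample.avi 0 42"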
# sample one clip randomly from the video
def test_rgb_with_temporal_jittering(self):
random_label = np.random.randint(0, 100)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=16,
clip_per_video=1,
crop_size=112,
scale_w=171,
scale_h=128,
length_rgb=8,
sampling_rate_rgb=1,
decode_type=0,
video_res_type=0)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label, random_label)
np.testing.assert_equal(data.shape, [16, 3, 8, 112, 112])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# sample multiple clips uniformly from the video
def test_rgb_with_uniform_sampling(self):
random_label = np.random.randint(0, 100)
clip_per_video = np.random.randint(2, 11)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=3,
clip_per_video=clip_per_video,
crop_size=112,
scale_w=171,
scale_h=128,
length_rgb=8,
sampling_rate_rgb=1,
decode_type=1,
video_res_type=0)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label, random_label)
np.testing.assert_equal(data.shape, [3 * clip_per_video, 3, 8, 112, 112])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# sample multiple clips uniformly from the video, rectangle cropping.
# VideoResType is USE_WIDTH_HEIGHT
def test_rgb_with_uniform_sampling_rectangle_cropping_use_width_height(self):
batch_size = 3
crop_height, crop_width = 112, 144
random_label = np.random.randint(0, 100)
clip_per_video = np.random.randint(2, 11)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=clip_per_video,
crop_height=crop_height,
crop_width=crop_width,
scale_w=171,
scale_h=128,
length_rgb=8,
sampling_rate_rgb=1,
color_jitter=True,
color_lighting=True,
decode_type=1,
video_res_type=0)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(
label.shape, [batch_size * clip_per_video])
for i in range(batch_size * clip_per_video):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape,
[batch_size * clip_per_video, 3, 8, crop_height, crop_width])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# sample multiple clips uniformly from the video, rectangle cropping.
# VideoResType is USE_MINIMAL_WIDTH_HEIGHT
def test_rgb_with_uniform_sampling_rectangle_cropping_use_minimal_width_height(
self
):
batch_size = 3
height_min, width_min = 128, 166
crop_height, crop_width = 112, 144
random_label = np.random.randint(0, 100)
clip_per_video = np.random.randint(2, 11)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=clip_per_video,
height_min=height_min,
width_min=width_min,
crop_height=crop_height,
crop_width=crop_width,
length_rgb=8,
sampling_rate_rgb=1,
color_jitter=True,
color_lighting=True,
decode_type=1,
video_res_type=1)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(
label.shape, [batch_size * clip_per_video])
for i in range(batch_size * clip_per_video):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape,
[batch_size * clip_per_video, 3, 8, crop_height, crop_width])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# sample multiple clips uniformly from the video, while color jitterring
# and lighting are enabled
def test_rgb_with_uniform_sampling_color_jittering_lighting(self):
batch_size = 3
random_label = np.random.randint(0, 100)
clip_per_video = np.random.randint(2, 11)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=clip_per_video,
crop_size=112,
scale_w=171,
scale_h=128,
length_rgb=8,
sampling_rate_rgb=1,
color_jitter=True,
color_lighting=True,
decode_type=1,
video_res_type=0)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(
label.shape, [batch_size * clip_per_video])
for i in range(batch_size * clip_per_video):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape,
[batch_size * clip_per_video, 3, 8, 112, 112])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# sample multiple clips uniformly from the video
def test_rgb_with_uniform_sampling_and_multi_cropping(self):
# we take left-top, central-top, right-top, left-bottom, central-bottom,
# right-bottom and central-central croppings as well as their mirrorings
# In total, 14 croppings
multi_crop_count = 14
batch_size = 3
random_label = np.random.randint(0, 100)
clip_per_video = np.random.randint(2, 11)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
# build the model
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=clip_per_video,
crop_size=112,
scale_w=171,
scale_h=128,
length_rgb=8,
sampling_rate_rgb=1,
decode_type=1,
multi_crop=True,
video_res_type=0)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(
label.shape, [batch_size * clip_per_video * multi_crop_count])
for i in range(batch_size * clip_per_video * multi_crop_count):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape,
[batch_size * clip_per_video * multi_crop_count, 3, 8, 112, 112])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# test optical flow
def test_optical_flow_with_temporal_jittering(self):
random_label = np.random.randint(0, 100)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, 16)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=16,
clip_per_video=1,
crop_size=112,
scale_w=171,
scale_h=128,
length_of=8,
sampling_rate_of=1,
frame_gap_of=1,
decode_type=0,
video_res_type=0,
get_rgb=False,
get_optical_flow=True)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label, random_label)
np.testing.assert_equal(data.shape, [16, 2, 8, 112, 112])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# test optical flow, rectangle cropping, VideoResType is
# USE_WIDTH_HEIGHT
def test_optical_flow_with_rectangle_cropping_use_width_height(self):
batch_size = 16
scale_h, scale_w = 128, 166
crop_height, crop_width = 112, 144
random_label = np.random.randint(0, 100)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, batch_size)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=1,
scale_h=scale_h,
scale_w=scale_w,
crop_height=crop_height,
crop_width=crop_width,
length_of=8,
sampling_rate_of=1,
frame_gap_of=1,
decode_type=0,
video_res_type=0,
get_rgb=False,
get_optical_flow=True)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label.shape, [batch_size])
for i in range(batch_size):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape, [batch_size, 2, 8, crop_height, crop_width])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# test optical flow, rectangle cropping, VideoResType is
# USE_MINIMAL_WIDTH_HEIGHT
def test_optical_flow_with_rectangle_cropping_use_minimal_width_height(self):
batch_size = 16
height_min, width_min = 128, 166
crop_height, crop_width = 112, 144
random_label = np.random.randint(0, 100)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, batch_size)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=1,
height_min=height_min,
width_min=width_min,
crop_height=crop_height,
crop_width=crop_width,
length_of=8,
sampling_rate_of=1,
frame_gap_of=1,
decode_type=0,
video_res_type=1,
get_rgb=False,
get_optical_flow=True)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label.shape, [batch_size])
for i in range(batch_size):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape, [batch_size, 2, 8, crop_height, crop_width])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
# test optical flow, multi-cropping
def test_optical_flow_with_multi_cropping(self):
multi_crop_count = 14
batch_size = 16
height_min, width_min = 128, 166
crop_height, crop_width = 112, 144
random_label = np.random.randint(0, 100)
VIDEO = "/mnt/vol/gfsdataswarm-oregon/users/trandu/sample.avi"
if not os.path.exists(VIDEO):
raise unittest.SkipTest('Missing data')
temp_list = tempfile.NamedTemporaryFile(delete=False).name
line_str = '{} 0 {}\n'.format(VIDEO, random_label)
self.create_a_list(temp_list, line_str, batch_size)
video_db_dir = tempfile.mkdtemp()
self.create_video_db(temp_list, video_db_dir)
model = model_helper.ModelHelper(name="Video Loader from LMDB")
reader = model.CreateDB("sample", db=video_db_dir, db_type="lmdb")
model.net.VideoInput(
reader,
["data", "label"],
name="data",
batch_size=batch_size,
clip_per_video=1,
height_min=height_min,
width_min=width_min,
crop_height=crop_height,
crop_width=crop_width,
length_of=8,
sampling_rate_of=1,
frame_gap_of=1,
decode_type=0,
multi_crop=True,
video_res_type=1,
get_rgb=False,
get_optical_flow=True)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
data = workspace.FetchBlob("data")
label = workspace.FetchBlob("label")
np.testing.assert_equal(label.shape, [batch_size * multi_crop_count])
for i in range(batch_size * multi_crop_count):
np.testing.assert_equal(label[i], random_label)
np.testing.assert_equal(
data.shape,
[batch_size * multi_crop_count, 2, 8, crop_height, crop_width])
os.remove(temp_list)
shutil.rmtree(video_db_dir)
if __name__ == "__main__":
unittest.main()
| 37.848958
| 83
| 0.613825
| 2,727
| 21,801
| 4.635864
| 0.096076
| 0.028239
| 0.031641
| 0.042715
| 0.839978
| 0.830407
| 0.822101
| 0.820361
| 0.813479
| 0.801851
| 0
| 0.021826
| 0.287556
| 21,801
| 575
| 84
| 37.914783
| 0.792107
| 0.073712
| 0
| 0.814894
| 0
| 0
| 0.065611
| 0.025905
| 0
| 0
| 0
| 0
| 0.057447
| 1
| 0.025532
| false
| 0
| 0.029787
| 0
| 0.059574
| 0.002128
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9708bcfb5d394a968c8cb530f00082e64937860
| 73
|
py
|
Python
|
api/helpers.py
|
elainevoice/backend
|
9b5fef59001fd6c2040affc80cd5cb9690c73795
|
[
"Apache-2.0"
] | 3
|
2020-12-28T16:45:56.000Z
|
2021-12-18T08:38:29.000Z
|
api/helpers.py
|
elainevoice/backend
|
9b5fef59001fd6c2040affc80cd5cb9690c73795
|
[
"Apache-2.0"
] | 1
|
2020-12-14T13:09:42.000Z
|
2020-12-14T13:09:42.000Z
|
api/helpers.py
|
elainevoice/backend
|
9b5fef59001fd6c2040affc80cd5cb9690c73795
|
[
"Apache-2.0"
] | null | null | null |
from api.config import models
def get_taco_models():
return models
| 12.166667
| 29
| 0.753425
| 11
| 73
| 4.818182
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191781
| 73
| 5
| 30
| 14.6
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6a3afa006a57a5677ca63007afeaba3503803610
| 92,637
|
py
|
Python
|
pypbbot/protocol/onebot_event_pb2.py
|
PHIKN1GHT/pypbbot_archived
|
8ab70830509c43b0babc53c9972d0a73481bdaa2
|
[
"MIT"
] | 11
|
2021-01-31T12:58:39.000Z
|
2021-10-15T02:53:13.000Z
|
pypbbot/protocol/onebot_event_pb2.py
|
PHIKN1GHT/pypbbot_archived
|
8ab70830509c43b0babc53c9972d0a73481bdaa2
|
[
"MIT"
] | null | null | null |
pypbbot/protocol/onebot_event_pb2.py
|
PHIKN1GHT/pypbbot_archived
|
8ab70830509c43b0babc53c9972d0a73481bdaa2
|
[
"MIT"
] | 4
|
2021-01-31T12:58:42.000Z
|
2021-09-11T17:35:11.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: onebot_event.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import onebot_base_pb2 as onebot__base__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='onebot_event.proto',
package='onebot',
syntax='proto3',
serialized_options=None,
serialized_pb=b'\n\x12onebot_event.proto\x12\x06onebot\x1a\x11onebot_base.proto\"\xba\x03\n\x13PrivateMessageEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x14\n\x0cmessage_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x12\n\nmessage_id\x18\x06 \x01(\x05\x12\x0f\n\x07user_id\x18\x07 \x01(\x03\x12 \n\x07message\x18\x08 \x03(\x0b\x32\x0f.onebot.Message\x12\x13\n\x0braw_message\x18\t \x01(\t\x12\x0c\n\x04\x66ont\x18\n \x01(\x05\x12\x32\n\x06sender\x18\x0b \x01(\x0b\x32\".onebot.PrivateMessageEvent.Sender\x12\x36\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32&.onebot.PrivateMessageEvent.ExtraEntry\x1a\x45\n\x06Sender\x12\x0f\n\x07user_id\x18\x01 \x01(\x03\x12\x10\n\x08nickname\x18\x02 \x01(\t\x12\x0b\n\x03sex\x18\x03 \x01(\t\x12\x0b\n\x03\x61ge\x18\x04 \x01(\x05\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xfc\x04\n\x11GroupMessageEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x14\n\x0cmessage_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x12\n\nmessage_id\x18\x06 \x01(\x05\x12\x10\n\x08group_id\x18\x07 \x01(\x03\x12\x0f\n\x07user_id\x18\x08 \x01(\x03\x12\x36\n\tanonymous\x18\t \x01(\x0b\x32#.onebot.GroupMessageEvent.Anonymous\x12 \n\x07message\x18\n \x03(\x0b\x32\x0f.onebot.Message\x12\x13\n\x0braw_message\x18\x0b \x01(\t\x12\x0c\n\x04\x66ont\x18\x0c \x01(\x05\x12\x30\n\x06sender\x18\r \x01(\x0b\x32 .onebot.GroupMessageEvent.Sender\x12\x34\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32$.onebot.GroupMessageEvent.ExtraEntry\x1a\x33\n\tAnonymous\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04\x66lag\x18\x03 \x01(\t\x1a\x8d\x01\n\x06Sender\x12\x0f\n\x07user_id\x18\x01 \x01(\x03\x12\x10\n\x08nickname\x18\x02 \x01(\t\x12\x0c\n\x04\x63\x61rd\x18\x03 \x01(\t\x12\x0b\n\x03sex\x18\x04 \x01(\t\x12\x0b\n\x03\x61ge\x18\x05 \x01(\x05\x12\x0c\n\x04\x61rea\x18\x06 \x01(\t\x12\r\n\x05level\x18\x07 \x01(\t\x12\x0c\n\x04role\x18\x08 \x01(\t\x12\r\n\x05title\x18\t \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xea\x02\n\x16GroupUploadNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08group_id\x18\x05 \x01(\x03\x12\x0f\n\x07user_id\x18\x06 \x01(\x03\x12\x31\n\x04\x66ile\x18\x07 \x01(\x0b\x32#.onebot.GroupUploadNoticeEvent.File\x12\x39\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32).onebot.GroupUploadNoticeEvent.ExtraEntry\x1aJ\n\x04\x46ile\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x03\x12\r\n\x05\x62usid\x18\x04 \x01(\x03\x12\x0b\n\x03url\x18\x05 \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xfb\x01\n\x15GroupAdminNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x10\n\x08group_id\x18\x06 \x01(\x03\x12\x0f\n\x07user_id\x18\x07 \x01(\x03\x12\x38\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32(.onebot.GroupAdminNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x96\x02\n\x18GroupDecreaseNoticeEvent\x12\x0c\n\x04time\x18\x01 '
b'\x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x10\n\x08group_id\x18\x06 \x01(\x03\x12\x13\n\x0boperator_id\x18\x07 \x01(\x03\x12\x0f\n\x07user_id\x18\x08 \x01(\x03\x12;\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32+.onebot.GroupDecreaseNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x96\x02\n\x18GroupIncreaseNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x10\n\x08group_id\x18\x06 \x01(\x03\x12\x13\n\x0boperator_id\x18\x07 \x01(\x03\x12\x0f\n\x07user_id\x18\x08 \x01(\x03\x12;\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32+.onebot.GroupIncreaseNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x9e\x02\n\x13GroupBanNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x10\n\x08group_id\x18\x06 \x01(\x03\x12\x13\n\x0boperator_id\x18\x07 \x01(\x03\x12\x0f\n\x07user_id\x18\x08 \x01(\x03\x12\x10\n\x08\x64uration\x18\t \x01(\x03\x12\x36\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32&.onebot.GroupBanNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xd5\x01\n\x14\x46riendAddNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x0f\n\x07user_id\x18\x05 \x01(\x03\x12\x37\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32\'.onebot.FriendAddNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x94\x02\n\x16GroupRecallNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x10\n\x08group_id\x18\x05 \x01(\x03\x12\x0f\n\x07user_id\x18\x06 \x01(\x03\x12\x13\n\x0boperator_id\x18\x07 \x01(\x03\x12\x12\n\nmessage_id\x18\x08 \x01(\x05\x12\x39\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32).onebot.GroupRecallNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xef\x01\n\x17\x46riendRecallNoticeEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x13\n\x0bnotice_type\x18\x04 \x01(\t\x12\x0f\n\x07user_id\x18\x05 \x01(\x03\x12\x12\n\nmessage_id\x18\x06 \x01(\x05\x12:\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32*.onebot.FriendRecallNoticeEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf1\x01\n\x12\x46riendRequestEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x14\n\x0crequest_type\x18\x04 \x01(\t\x12\x0f\n\x07user_id\x18\x05 \x01(\x03\x12\x0f\n\x07\x63omment\x18\x06 \x01(\t\x12\x0c\n\x04\x66lag\x18\x07 \x01(\t\x12\x35\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32%.onebot.FriendRequestEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 '
b'\x01(\t:\x02\x38\x01\"\x93\x02\n\x11GroupRequestEvent\x12\x0c\n\x04time\x18\x01 \x01(\x03\x12\x0f\n\x07self_id\x18\x02 \x01(\x03\x12\x11\n\tpost_type\x18\x03 \x01(\t\x12\x14\n\x0crequest_type\x18\x04 \x01(\t\x12\x10\n\x08sub_type\x18\x05 \x01(\t\x12\x10\n\x08group_id\x18\x06 \x01(\x03\x12\x0f\n\x07user_id\x18\x07 \x01(\x03\x12\x0f\n\x07\x63omment\x18\x08 \x01(\t\x12\x0c\n\x04\x66lag\x18\t \x01(\t\x12\x34\n\x05\x65xtra\x18\xff\x01 \x03(\x0b\x32$.onebot.GroupRequestEvent.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x62\x06proto3'
,
dependencies=[onebot__base__pb2.DESCRIPTOR,])
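# Everything below is protoc-style reflection data for onebot_event.proto:
# serialized_pb above holds the wire-encoded FileDescriptorProto, and each
# Descriptor's serialized_start/serialized_end give the byte range of that
# message's sub-descriptor inside serialized_pb, which is how the protobuf
# runtime locates it when building the concrete message classes.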
_PRIVATEMESSAGEEVENT_SENDER = _descriptor.Descriptor(
name='Sender',
full_name='onebot.PrivateMessageEvent.Sender',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.PrivateMessageEvent.Sender.user_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nickname', full_name='onebot.PrivateMessageEvent.Sender.nickname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sex', full_name='onebot.PrivateMessageEvent.Sender.sex', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='age', full_name='onebot.PrivateMessageEvent.Sender.age', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=377,
serialized_end=446,
)
_PRIVATEMESSAGEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.PrivateMessageEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.PrivateMessageEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.PrivateMessageEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=448,
serialized_end=492,
)
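# serialized_options=b'8\001' encodes MessageOptions.map_entry = true (field 7,
# varint 1): ExtraEntry is the synthetic key/value pair type that proto3
# generates for the `map<string, string> extra = 255;` field, so `extra`
# behaves like a dict on the generated message class.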
_PRIVATEMESSAGEEVENT = _descriptor.Descriptor(
name='PrivateMessageEvent',
full_name='onebot.PrivateMessageEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.PrivateMessageEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.PrivateMessageEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.PrivateMessageEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_type', full_name='onebot.PrivateMessageEvent.message_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.PrivateMessageEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_id', full_name='onebot.PrivateMessageEvent.message_id', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.PrivateMessageEvent.user_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='onebot.PrivateMessageEvent.message', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='raw_message', full_name='onebot.PrivateMessageEvent.raw_message', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='font', full_name='onebot.PrivateMessageEvent.font', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sender', full_name='onebot.PrivateMessageEvent.sender', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.PrivateMessageEvent.extra', index=11,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_PRIVATEMESSAGEEVENT_SENDER, _PRIVATEMESSAGEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=50,
serialized_end=492,
)
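# A minimal usage sketch (commented out so this generated module is unchanged),
# assuming the standard _reflection.GeneratedProtocolMessageType wiring further
# down in this file builds a PrivateMessageEvent class from the descriptor above
# and that the module is importable as onebot_event_pb2:
#
#   from onebot_event_pb2 import PrivateMessageEvent
#
#   evt = PrivateMessageEvent()
#   evt.time = 1600000000           # int64 field (type=3)
#   evt.user_id = 12345678          # int64 field
#   evt.raw_message = 'hello'       # string field (type=9)
#   evt.sender.nickname = 'alice'   # nested Sender submessage
#   evt.extra['trace_id'] = 'abc'   # map<string, string> backed by ExtraEntry
#   payload = evt.SerializeToString()
#   restored = PrivateMessageEvent.FromString(payload)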
_GROUPMESSAGEEVENT_ANONYMOUS = _descriptor.Descriptor(
name='Anonymous',
full_name='onebot.GroupMessageEvent.Anonymous',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='onebot.GroupMessageEvent.Anonymous.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='onebot.GroupMessageEvent.Anonymous.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flag', full_name='onebot.GroupMessageEvent.Anonymous.flag', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=890,
serialized_end=941,
)
_GROUPMESSAGEEVENT_SENDER = _descriptor.Descriptor(
name='Sender',
full_name='onebot.GroupMessageEvent.Sender',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupMessageEvent.Sender.user_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nickname', full_name='onebot.GroupMessageEvent.Sender.nickname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='card', full_name='onebot.GroupMessageEvent.Sender.card', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sex', full_name='onebot.GroupMessageEvent.Sender.sex', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='age', full_name='onebot.GroupMessageEvent.Sender.age', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='area', full_name='onebot.GroupMessageEvent.Sender.area', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level', full_name='onebot.GroupMessageEvent.Sender.level', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='role', full_name='onebot.GroupMessageEvent.Sender.role', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='title', full_name='onebot.GroupMessageEvent.Sender.title', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=944,
serialized_end=1085,
)
_GROUPMESSAGEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupMessageEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupMessageEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupMessageEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1087,
serialized_end=1131,
)
_GROUPMESSAGEEVENT = _descriptor.Descriptor(
name='GroupMessageEvent',
full_name='onebot.GroupMessageEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupMessageEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupMessageEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupMessageEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_type', full_name='onebot.GroupMessageEvent.message_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupMessageEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_id', full_name='onebot.GroupMessageEvent.message_id', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupMessageEvent.group_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupMessageEvent.user_id', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='anonymous', full_name='onebot.GroupMessageEvent.anonymous', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='onebot.GroupMessageEvent.message', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='raw_message', full_name='onebot.GroupMessageEvent.raw_message', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='font', full_name='onebot.GroupMessageEvent.font', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sender', full_name='onebot.GroupMessageEvent.sender', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupMessageEvent.extra', index=13,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPMESSAGEEVENT_ANONYMOUS, _GROUPMESSAGEEVENT_SENDER, _GROUPMESSAGEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=495,
serialized_end=1131,
)
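# GroupMessageEvent mirrors PrivateMessageEvent and adds group_id, an optional
# Anonymous submessage (populated only for anonymous senders), and a richer
# Sender carrying the group-specific card, area, level, role and title fields.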
_GROUPUPLOADNOTICEEVENT_FILE = _descriptor.Descriptor(
name='File',
full_name='onebot.GroupUploadNoticeEvent.File',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='onebot.GroupUploadNoticeEvent.File.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='onebot.GroupUploadNoticeEvent.File.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='size', full_name='onebot.GroupUploadNoticeEvent.File.size', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='busid', full_name='onebot.GroupUploadNoticeEvent.File.busid', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='onebot.GroupUploadNoticeEvent.File.url', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1376,
serialized_end=1450,
)
_GROUPUPLOADNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupUploadNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupUploadNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupUploadNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1452,
serialized_end=1496,
)
_GROUPUPLOADNOTICEEVENT = _descriptor.Descriptor(
name='GroupUploadNoticeEvent',
full_name='onebot.GroupUploadNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupUploadNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupUploadNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupUploadNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupUploadNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupUploadNoticeEvent.group_id', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupUploadNoticeEvent.user_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file', full_name='onebot.GroupUploadNoticeEvent.file', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupUploadNoticeEvent.extra', index=7,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPUPLOADNOTICEEVENT_FILE, _GROUPUPLOADNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1134,
serialized_end=1496,
)
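# GroupUploadNoticeEvent.file describes the uploaded file: id/name/url are
# strings and size is an int64 byte count; busid is an implementation-specific
# int64 (go-cqhttp, for example, pairs it with id when resolving group files).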
_GROUPADMINNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupAdminNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupAdminNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupAdminNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1706,
serialized_end=1750,
)
_GROUPADMINNOTICEEVENT = _descriptor.Descriptor(
name='GroupAdminNoticeEvent',
full_name='onebot.GroupAdminNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupAdminNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupAdminNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupAdminNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupAdminNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupAdminNoticeEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupAdminNoticeEvent.group_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupAdminNoticeEvent.user_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupAdminNoticeEvent.extra', index=7,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPADMINNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1499,
serialized_end=1750,
)
_GROUPDECREASENOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupDecreaseNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupDecreaseNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupDecreaseNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1987,
serialized_end=2031,
)
_GROUPDECREASENOTICEEVENT = _descriptor.Descriptor(
name='GroupDecreaseNoticeEvent',
full_name='onebot.GroupDecreaseNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupDecreaseNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupDecreaseNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupDecreaseNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupDecreaseNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupDecreaseNoticeEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupDecreaseNoticeEvent.group_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator_id', full_name='onebot.GroupDecreaseNoticeEvent.operator_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupDecreaseNoticeEvent.user_id', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupDecreaseNoticeEvent.extra', index=8,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPDECREASENOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1753,
serialized_end=2031,
)
_GROUPINCREASENOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupIncreaseNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupIncreaseNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupIncreaseNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2268,
serialized_end=2312,
)
_GROUPINCREASENOTICEEVENT = _descriptor.Descriptor(
name='GroupIncreaseNoticeEvent',
full_name='onebot.GroupIncreaseNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupIncreaseNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupIncreaseNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupIncreaseNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupIncreaseNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupIncreaseNoticeEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupIncreaseNoticeEvent.group_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator_id', full_name='onebot.GroupIncreaseNoticeEvent.operator_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupIncreaseNoticeEvent.user_id', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupIncreaseNoticeEvent.extra', index=8,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPINCREASENOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2034,
serialized_end=2312,
)
_GROUPBANNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupBanNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupBanNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupBanNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2557,
serialized_end=2601,
)
_GROUPBANNOTICEEVENT = _descriptor.Descriptor(
name='GroupBanNoticeEvent',
full_name='onebot.GroupBanNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupBanNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupBanNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupBanNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupBanNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupBanNoticeEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupBanNoticeEvent.group_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator_id', full_name='onebot.GroupBanNoticeEvent.operator_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupBanNoticeEvent.user_id', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='duration', full_name='onebot.GroupBanNoticeEvent.duration', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupBanNoticeEvent.extra', index=9,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPBANNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2315,
serialized_end=2601,
)
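# duration is an int64; per the OneBot convention for group ban notices it is
# the mute length in seconds (typically 0 when sub_type reports the ban being
# lifted).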
_FRIENDADDNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.FriendAddNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.FriendAddNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.FriendAddNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2773,
serialized_end=2817,
)
_FRIENDADDNOTICEEVENT = _descriptor.Descriptor(
name='FriendAddNoticeEvent',
full_name='onebot.FriendAddNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.FriendAddNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.FriendAddNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.FriendAddNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.FriendAddNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.FriendAddNoticeEvent.user_id', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.FriendAddNoticeEvent.extra', index=5,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FRIENDADDNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2604,
serialized_end=2817,
)
_GROUPRECALLNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupRecallNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupRecallNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupRecallNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3052,
serialized_end=3096,
)
_GROUPRECALLNOTICEEVENT = _descriptor.Descriptor(
name='GroupRecallNoticeEvent',
full_name='onebot.GroupRecallNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupRecallNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupRecallNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupRecallNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.GroupRecallNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupRecallNoticeEvent.group_id', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupRecallNoticeEvent.user_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator_id', full_name='onebot.GroupRecallNoticeEvent.operator_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_id', full_name='onebot.GroupRecallNoticeEvent.message_id', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupRecallNoticeEvent.extra', index=8,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPRECALLNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2820,
serialized_end=3096,
)
_FRIENDRECALLNOTICEEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.FriendRecallNoticeEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.FriendRecallNoticeEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.FriendRecallNoticeEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3294,
serialized_end=3338,
)
_FRIENDRECALLNOTICEEVENT = _descriptor.Descriptor(
name='FriendRecallNoticeEvent',
full_name='onebot.FriendRecallNoticeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.FriendRecallNoticeEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.FriendRecallNoticeEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.FriendRecallNoticeEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='notice_type', full_name='onebot.FriendRecallNoticeEvent.notice_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.FriendRecallNoticeEvent.user_id', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message_id', full_name='onebot.FriendRecallNoticeEvent.message_id', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.FriendRecallNoticeEvent.extra', index=6,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FRIENDRECALLNOTICEEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3099,
serialized_end=3338,
)
_FRIENDREQUESTEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.FriendRequestEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.FriendRequestEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.FriendRequestEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3538,
serialized_end=3582,
)
_FRIENDREQUESTEVENT = _descriptor.Descriptor(
name='FriendRequestEvent',
full_name='onebot.FriendRequestEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.FriendRequestEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.FriendRequestEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.FriendRequestEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='request_type', full_name='onebot.FriendRequestEvent.request_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.FriendRequestEvent.user_id', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='comment', full_name='onebot.FriendRequestEvent.comment', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flag', full_name='onebot.FriendRequestEvent.flag', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.FriendRequestEvent.extra', index=7,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FRIENDREQUESTEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3341,
serialized_end=3582,
)
_GROUPREQUESTEVENT_EXTRAENTRY = _descriptor.Descriptor(
name='ExtraEntry',
full_name='onebot.GroupRequestEvent.ExtraEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='onebot.GroupRequestEvent.ExtraEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='onebot.GroupRequestEvent.ExtraEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=448,
serialized_end=492,
)
_GROUPREQUESTEVENT = _descriptor.Descriptor(
name='GroupRequestEvent',
full_name='onebot.GroupRequestEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time', full_name='onebot.GroupRequestEvent.time', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='self_id', full_name='onebot.GroupRequestEvent.self_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='post_type', full_name='onebot.GroupRequestEvent.post_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='request_type', full_name='onebot.GroupRequestEvent.request_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sub_type', full_name='onebot.GroupRequestEvent.sub_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='group_id', full_name='onebot.GroupRequestEvent.group_id', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='onebot.GroupRequestEvent.user_id', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='comment', full_name='onebot.GroupRequestEvent.comment', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flag', full_name='onebot.GroupRequestEvent.flag', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='onebot.GroupRequestEvent.extra', index=9,
number=255, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GROUPREQUESTEVENT_EXTRAENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3585,
serialized_end=3860,
)
_PRIVATEMESSAGEEVENT_SENDER.containing_type = _PRIVATEMESSAGEEVENT
_PRIVATEMESSAGEEVENT_EXTRAENTRY.containing_type = _PRIVATEMESSAGEEVENT
_PRIVATEMESSAGEEVENT.fields_by_name['message'].message_type = onebot__base__pb2._MESSAGE
_PRIVATEMESSAGEEVENT.fields_by_name['sender'].message_type = _PRIVATEMESSAGEEVENT_SENDER
_PRIVATEMESSAGEEVENT.fields_by_name['extra'].message_type = _PRIVATEMESSAGEEVENT_EXTRAENTRY
_GROUPMESSAGEEVENT_ANONYMOUS.containing_type = _GROUPMESSAGEEVENT
_GROUPMESSAGEEVENT_SENDER.containing_type = _GROUPMESSAGEEVENT
_GROUPMESSAGEEVENT_EXTRAENTRY.containing_type = _GROUPMESSAGEEVENT
_GROUPMESSAGEEVENT.fields_by_name['anonymous'].message_type = _GROUPMESSAGEEVENT_ANONYMOUS
_GROUPMESSAGEEVENT.fields_by_name['message'].message_type = onebot__base__pb2._MESSAGE
_GROUPMESSAGEEVENT.fields_by_name['sender'].message_type = _GROUPMESSAGEEVENT_SENDER
_GROUPMESSAGEEVENT.fields_by_name['extra'].message_type = _GROUPMESSAGEEVENT_EXTRAENTRY
_GROUPUPLOADNOTICEEVENT_FILE.containing_type = _GROUPUPLOADNOTICEEVENT
_GROUPUPLOADNOTICEEVENT_EXTRAENTRY.containing_type = _GROUPUPLOADNOTICEEVENT
_GROUPUPLOADNOTICEEVENT.fields_by_name['file'].message_type = _GROUPUPLOADNOTICEEVENT_FILE
_GROUPUPLOADNOTICEEVENT.fields_by_name['extra'].message_type = _GROUPUPLOADNOTICEEVENT_EXTRAENTRY
_GROUPADMINNOTICEEVENT_EXTRAENTRY.containing_type = _GROUPADMINNOTICEEVENT
_GROUPADMINNOTICEEVENT.fields_by_name['extra'].message_type = _GROUPADMINNOTICEEVENT_EXTRAENTRY
_GROUPDECREASENOTICEEVENT_EXTRAENTRY.containing_type = _GROUPDECREASENOTICEEVENT
_GROUPDECREASENOTICEEVENT.fields_by_name['extra'].message_type = _GROUPDECREASENOTICEEVENT_EXTRAENTRY
_GROUPINCREASENOTICEEVENT_EXTRAENTRY.containing_type = _GROUPINCREASENOTICEEVENT
_GROUPINCREASENOTICEEVENT.fields_by_name['extra'].message_type = _GROUPINCREASENOTICEEVENT_EXTRAENTRY
_GROUPBANNOTICEEVENT_EXTRAENTRY.containing_type = _GROUPBANNOTICEEVENT
_GROUPBANNOTICEEVENT.fields_by_name['extra'].message_type = _GROUPBANNOTICEEVENT_EXTRAENTRY
_FRIENDADDNOTICEEVENT_EXTRAENTRY.containing_type = _FRIENDADDNOTICEEVENT
_FRIENDADDNOTICEEVENT.fields_by_name['extra'].message_type = _FRIENDADDNOTICEEVENT_EXTRAENTRY
_GROUPRECALLNOTICEEVENT_EXTRAENTRY.containing_type = _GROUPRECALLNOTICEEVENT
_GROUPRECALLNOTICEEVENT.fields_by_name['extra'].message_type = _GROUPRECALLNOTICEEVENT_EXTRAENTRY
_FRIENDRECALLNOTICEEVENT_EXTRAENTRY.containing_type = _FRIENDRECALLNOTICEEVENT
_FRIENDRECALLNOTICEEVENT.fields_by_name['extra'].message_type = _FRIENDRECALLNOTICEEVENT_EXTRAENTRY
_FRIENDREQUESTEVENT_EXTRAENTRY.containing_type = _FRIENDREQUESTEVENT
_FRIENDREQUESTEVENT.fields_by_name['extra'].message_type = _FRIENDREQUESTEVENT_EXTRAENTRY
_GROUPREQUESTEVENT_EXTRAENTRY.containing_type = _GROUPREQUESTEVENT
_GROUPREQUESTEVENT.fields_by_name['extra'].message_type = _GROUPREQUESTEVENT_EXTRAENTRY
DESCRIPTOR.message_types_by_name['PrivateMessageEvent'] = _PRIVATEMESSAGEEVENT
DESCRIPTOR.message_types_by_name['GroupMessageEvent'] = _GROUPMESSAGEEVENT
DESCRIPTOR.message_types_by_name['GroupUploadNoticeEvent'] = _GROUPUPLOADNOTICEEVENT
DESCRIPTOR.message_types_by_name['GroupAdminNoticeEvent'] = _GROUPADMINNOTICEEVENT
DESCRIPTOR.message_types_by_name['GroupDecreaseNoticeEvent'] = _GROUPDECREASENOTICEEVENT
DESCRIPTOR.message_types_by_name['GroupIncreaseNoticeEvent'] = _GROUPINCREASENOTICEEVENT
DESCRIPTOR.message_types_by_name['GroupBanNoticeEvent'] = _GROUPBANNOTICEEVENT
DESCRIPTOR.message_types_by_name['FriendAddNoticeEvent'] = _FRIENDADDNOTICEEVENT
DESCRIPTOR.message_types_by_name['GroupRecallNoticeEvent'] = _GROUPRECALLNOTICEEVENT
DESCRIPTOR.message_types_by_name['FriendRecallNoticeEvent'] = _FRIENDRECALLNOTICEEVENT
DESCRIPTOR.message_types_by_name['FriendRequestEvent'] = _FRIENDREQUESTEVENT
DESCRIPTOR.message_types_by_name['GroupRequestEvent'] = _GROUPREQUESTEVENT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PrivateMessageEvent = _reflection.GeneratedProtocolMessageType('PrivateMessageEvent', (_message.Message,), {
'Sender' : _reflection.GeneratedProtocolMessageType('Sender', (_message.Message,), {
'DESCRIPTOR' : _PRIVATEMESSAGEEVENT_SENDER,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.PrivateMessageEvent.Sender)
})
,
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _PRIVATEMESSAGEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.PrivateMessageEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _PRIVATEMESSAGEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.PrivateMessageEvent)
})
_sym_db.RegisterMessage(PrivateMessageEvent)
_sym_db.RegisterMessage(PrivateMessageEvent.Sender)
_sym_db.RegisterMessage(PrivateMessageEvent.ExtraEntry)
GroupMessageEvent = _reflection.GeneratedProtocolMessageType('GroupMessageEvent', (_message.Message,), {
'Anonymous' : _reflection.GeneratedProtocolMessageType('Anonymous', (_message.Message,), {
'DESCRIPTOR' : _GROUPMESSAGEEVENT_ANONYMOUS,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupMessageEvent.Anonymous)
})
,
'Sender' : _reflection.GeneratedProtocolMessageType('Sender', (_message.Message,), {
'DESCRIPTOR' : _GROUPMESSAGEEVENT_SENDER,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupMessageEvent.Sender)
})
,
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPMESSAGEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupMessageEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPMESSAGEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupMessageEvent)
})
_sym_db.RegisterMessage(GroupMessageEvent)
_sym_db.RegisterMessage(GroupMessageEvent.Anonymous)
_sym_db.RegisterMessage(GroupMessageEvent.Sender)
_sym_db.RegisterMessage(GroupMessageEvent.ExtraEntry)
GroupUploadNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupUploadNoticeEvent', (_message.Message,), {
'File' : _reflection.GeneratedProtocolMessageType('File', (_message.Message,), {
'DESCRIPTOR' : _GROUPUPLOADNOTICEEVENT_FILE,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupUploadNoticeEvent.File)
})
,
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPUPLOADNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupUploadNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPUPLOADNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupUploadNoticeEvent)
})
_sym_db.RegisterMessage(GroupUploadNoticeEvent)
_sym_db.RegisterMessage(GroupUploadNoticeEvent.File)
_sym_db.RegisterMessage(GroupUploadNoticeEvent.ExtraEntry)
GroupAdminNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupAdminNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPADMINNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupAdminNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPADMINNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupAdminNoticeEvent)
})
_sym_db.RegisterMessage(GroupAdminNoticeEvent)
_sym_db.RegisterMessage(GroupAdminNoticeEvent.ExtraEntry)
GroupDecreaseNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupDecreaseNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPDECREASENOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupDecreaseNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPDECREASENOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupDecreaseNoticeEvent)
})
_sym_db.RegisterMessage(GroupDecreaseNoticeEvent)
_sym_db.RegisterMessage(GroupDecreaseNoticeEvent.ExtraEntry)
GroupIncreaseNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupIncreaseNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPINCREASENOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupIncreaseNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPINCREASENOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupIncreaseNoticeEvent)
})
_sym_db.RegisterMessage(GroupIncreaseNoticeEvent)
_sym_db.RegisterMessage(GroupIncreaseNoticeEvent.ExtraEntry)
GroupBanNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupBanNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPBANNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupBanNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPBANNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupBanNoticeEvent)
})
_sym_db.RegisterMessage(GroupBanNoticeEvent)
_sym_db.RegisterMessage(GroupBanNoticeEvent.ExtraEntry)
FriendAddNoticeEvent = _reflection.GeneratedProtocolMessageType('FriendAddNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _FRIENDADDNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendAddNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _FRIENDADDNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendAddNoticeEvent)
})
_sym_db.RegisterMessage(FriendAddNoticeEvent)
_sym_db.RegisterMessage(FriendAddNoticeEvent.ExtraEntry)
GroupRecallNoticeEvent = _reflection.GeneratedProtocolMessageType('GroupRecallNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPRECALLNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupRecallNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPRECALLNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupRecallNoticeEvent)
})
_sym_db.RegisterMessage(GroupRecallNoticeEvent)
_sym_db.RegisterMessage(GroupRecallNoticeEvent.ExtraEntry)
FriendRecallNoticeEvent = _reflection.GeneratedProtocolMessageType('FriendRecallNoticeEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _FRIENDRECALLNOTICEEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendRecallNoticeEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _FRIENDRECALLNOTICEEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendRecallNoticeEvent)
})
_sym_db.RegisterMessage(FriendRecallNoticeEvent)
_sym_db.RegisterMessage(FriendRecallNoticeEvent.ExtraEntry)
FriendRequestEvent = _reflection.GeneratedProtocolMessageType('FriendRequestEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _FRIENDREQUESTEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendRequestEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _FRIENDREQUESTEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.FriendRequestEvent)
})
_sym_db.RegisterMessage(FriendRequestEvent)
_sym_db.RegisterMessage(FriendRequestEvent.ExtraEntry)
GroupRequestEvent = _reflection.GeneratedProtocolMessageType('GroupRequestEvent', (_message.Message,), {
'ExtraEntry' : _reflection.GeneratedProtocolMessageType('ExtraEntry', (_message.Message,), {
'DESCRIPTOR' : _GROUPREQUESTEVENT_EXTRAENTRY,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupRequestEvent.ExtraEntry)
})
,
'DESCRIPTOR' : _GROUPREQUESTEVENT,
'__module__' : 'onebot_event_pb2'
# @@protoc_insertion_point(class_scope:onebot.GroupRequestEvent)
})
_sym_db.RegisterMessage(GroupRequestEvent)
_sym_db.RegisterMessage(GroupRequestEvent.ExtraEntry)
_PRIVATEMESSAGEEVENT_EXTRAENTRY._options = None
_GROUPMESSAGEEVENT_EXTRAENTRY._options = None
_GROUPUPLOADNOTICEEVENT_EXTRAENTRY._options = None
_GROUPADMINNOTICEEVENT_EXTRAENTRY._options = None
_GROUPDECREASENOTICEEVENT_EXTRAENTRY._options = None
_GROUPINCREASENOTICEEVENT_EXTRAENTRY._options = None
_GROUPBANNOTICEEVENT_EXTRAENTRY._options = None
_FRIENDADDNOTICEEVENT_EXTRAENTRY._options = None
_GROUPRECALLNOTICEEVENT_EXTRAENTRY._options = None
_FRIENDRECALLNOTICEEVENT_EXTRAENTRY._options = None
_FRIENDREQUESTEVENT_EXTRAENTRY._options = None
_GROUPREQUESTEVENT_EXTRAENTRY._options = None
# @@protoc_insertion_point(module_scope)
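# --- Usage sketch (editorial addition, not part of the protoc output) -----
# A minimal example of the generated classes above, using only the standard
# protobuf Message API (SerializeToString/ParseFromString); guarded so that
# importing this module stays side-effect free. Field values are invented
# for illustration.
if __name__ == '__main__':
    evt = FriendRequestEvent(
        time=1609286400,
        self_id=10000,
        post_type='request',
        request_type='friend',
        user_id=12345,
        comment='hello',
        flag='req-flag-1',
    )
    evt.extra['source'] = 'example'   # proto3 map<string, string> field

    data = evt.SerializeToString()    # wire-format bytes
    decoded = FriendRequestEvent()
    decoded.ParseFromString(data)
    assert decoded.user_id == 12345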
| 45.343612
| 7,599
| 0.744519
| 11,609
| 92,637
| 5.67017
| 0.02343
| 0.059916
| 0.038921
| 0.047095
| 0.848355
| 0.780904
| 0.774478
| 0.765545
| 0.747588
| 0.733369
| 0
| 0.041897
| 0.129398
| 92,637
| 2,042
| 7,600
| 45.365818
| 0.774284
| 0.023684
| 0
| 0.757513
| 1
| 0.001554
| 0.188256
| 0.14722
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002591
| 0
| 0.002591
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| e00d42b5d72906ba0fd604e6e6bc4e20648f28a1
| 333,683
| py
| Python
| lib/python2.7/site-packages/ldap3/protocol/schemas/ad2012R2.py
| crav7/ProjectDjango
| 10dc03919b1fcfc34d2ddc93b85989638399e3e9
| ["MIT"]
| 2
| 2020-12-30T18:27:17.000Z
| 2020-12-30T18:27:18.000Z
| lib/python2.7/site-packages/ldap3/protocol/schemas/ad2012R2.py
| jppgibbs/Aegis
| feac08cd3935569057e75531fe80bd0e1f982a93
| ["MIT"]
| null
| null
| null
| lib/python2.7/site-packages/ldap3/protocol/schemas/ad2012R2.py
| jppgibbs/Aegis
| feac08cd3935569057e75531fe80bd0e1f982a93
| ["MIT"]
| null
| null
| null
|
"""
"""
# Created on 2014.10.21
#
# Author: Giovanni Cannata
#
# Copyright 2014, 2015, 2016, 2017 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
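# --- Usage sketch (editorial addition, not part of ldap3 itself) ----------
# ldap3 ships the JSON blob below as one of its offline schema definitions;
# a minimal sketch, assuming the public ldap3 API, where passing
# get_info=OFFLINE_AD_2012_R2 attaches this static schema to a Server
# object instead of reading schema from a live directory. Guarded to avoid
# a circular import while ldap3 loads this module; the hostname is
# hypothetical.
if __name__ == '__main__':
    from ldap3 import Server, OFFLINE_AD_2012_R2
    server = Server('dc.example.com', get_info=OFFLINE_AD_2012_R2)
    # server.schema is parsed from ad_2012_r2_schema defined below
    # (attributeTypes such as sAMAccountName, objectClasses, etc.).
    print(server.schema)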
ad_2012_r2_schema = """
{
"raw": {
"attributeTypes": [
"( 1.2.840.113556.1.4.149 NAME 'attributeSecurityGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1703 NAME 'msDS-FilterContainers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.655 NAME 'legacyExchangeDN' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.21 NAME 'cOMProgID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2147 NAME 'msDNS-PropagationTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.301 NAME 'msSFU30KeyAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.686 NAME 'domainID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.23 NAME 'msDFSR-ReplicationGroupGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.818 NAME 'productCode' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.18 NAME 'oncRpcNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.221 NAME 'sAMAccountName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.375 NAME 'systemFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.814 NAME 'msiScript' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.880 NAME 'fRSTimeLastCommand' SYNTAX '1.3.6.1.4.1.1466.115.121.1.53' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1850 NAME 'msDS-TopQuotaUsage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2052 NAME 'msDS-OIDToGroupLinkBl' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.965 NAME 'mSMQSiteName' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1373 NAME 'mS-SQL-Clustered' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.624 NAME 'ipsecOwnersReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1353 NAME 'localizationDisplayId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1637 NAME 'msWMI-StringValidValues' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2103 NAME 'msDS-MembersOfResourcePropertyList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.480 NAME 'defaultGroup' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.55 NAME 'dBCSPwd' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1330 NAME 'pKICriticalExtensions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.93 NAME 'pwdProperties' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1840 NAME 'msDS-ObjectReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.7 NAME 'subRefs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.845 NAME 'msiScriptName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2242 NAME 'msDS-MaximumRegistrationInactivityPeriod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.7 NAME 'photo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.713 NAME 'optionsLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.942 NAME 'mSMQVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2138 NAME 'msDNS-NSEC3Iterations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.471 NAME 'trustParent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1237 NAME 'mSMQRoutingService' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.649 NAME 'primaryInternationalISDNNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1627 NAME 'msWMI-ID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2006 NAME 'msTSExpireDate4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2003 NAME 'msTSExpireDate3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2000 NAME 'msTSExpireDate2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.12 NAME 'documentTitle' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113549.1.9.8 NAME 'unstructuredAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.340 NAME 'msSFU30Domains' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.2069 NAME 'msDS-EnabledFeatureBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.3.6.1.1.1.1.6 NAME 'shadowMin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1412 NAME 'primaryGroupToken' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.358 NAME 'netbootInitialization' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2136 NAME 'msDNS-NSEC3HashAlgorithm' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.1 NAME 'instanceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.846 NAME 'msiScriptSize' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.20 NAME 'msDFSR-RdcMinFileSizeInKb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.663 NAME 'partialAttributeDeletionList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2078 NAME 'msTSSecondaryDesktopBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1995 NAME 'msTSManagingLS' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.371 NAME 'rIDAllocationPool' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.677 NAME 'replTopologyStayOfExecution' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.3 NAME 'replPropertyMetaData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2036 NAME 'msDFS-Commentv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.329 NAME 'versionNumberLo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.234 NAME 'printEndTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1673 NAME 'msPKI-OID-User-Notice' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.684 NAME 'certificateAuthorityObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.290 NAME 'printNumberUp' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1625 NAME 'msWMI-ClassDefinition' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1784 NAME 'msDS-LogonTimeSyncInterval' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1910 NAME 'unixUserPassword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.129 NAME 'trustAuthIncoming' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1319 NAME 'aCSNonReservedTokenSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1628 NAME 'msWMI-IntDefault' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1249 NAME 'proxiedObjectName' SYNTAX '1.2.840.113556.1.4.903' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2173 NAME 'msKds-PublicKeyLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 2.5.4.27 NAME 'destinationIndicator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.2187 NAME 'msDS-ValueTypeReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.897 NAME 'aCSMaxAggregatePeakRatePerUser' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1335 NAME 'pKIEnrollmentAccess' SYNTAX '1.2.840.113556.1.4.907' )",
"( 1.2.840.113556.1.4.1708 NAME 'msDS-ReplValueMetaData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1690 NAME 'adminMultiselectPropertyPages' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.35 NAME 'userPassword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2200 NAME 'msDS-GroupMSAMembership' SYNTAX '1.2.840.113556.1.4.907' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.500 NAME 'fRSServiceCommand' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.1 NAME 'carLicense' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2038 NAME 'msDFS-TargetListv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.27 NAME 'msDFSR-DeletedSizeInMb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1648 NAME 'msWMI-TargetPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.5 NAME 'shadowLastChange' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1793 NAME 'msDS-NonMembers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.3.6.1.1.1.1.22 NAME 'macAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.265 NAME 'notes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2274 NAME 'msDS-CloudIssuerPublicCertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1982 NAME 'msTSMaxConnectionTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1959 NAME 'msDS-isGC' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1424 NAME 'msCOM-PartitionSetLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.516 NAME 'serverReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1977 NAME 'msTSHomeDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1369 NAME 'mS-SQL-ServiceAccount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.530 NAME 'nonSecurityMember' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.506 NAME 'objectCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1386 NAME 'mS-SQL-GPSLongitude' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1437 NAME 'msPKI-Supersede-Templates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1707 NAME 'msDS-ReplAttributeMetaData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.652 NAME 'assistant' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1644 NAME 'msWMI-SourceOrganization' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1443 NAME 'msDS-Site-Affinity' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.286 NAME 'printRateUnit' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1444 NAME 'msDS-Preferred-GC-Site' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.589 NAME 'meetingBandwidth' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.1706 NAME 'msDS-NCReplOutboundNeighbors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1709 NAME 'msDS-HasInstantiatedNCs' SYNTAX '1.2.840.113556.1.4.903' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.79 NAME 'minPwdLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1952 NAME 'ms-net-ieee-80211-GP-PolicyData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.865 NAME 'pekList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 2.5.4.26 NAME 'registeredAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2179 NAME 'msKds-CreateTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2149 NAME 'msDNS-NSEC3CurrentSalt' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1815 NAME 'msDS-TasksForAzRoleBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2148 NAME 'msDNS-NSEC3UserSalt' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2197 NAME 'msDS-ManagedPasswordId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1407 NAME 'mS-SQL-ThirdParty' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.510 NAME 'serviceBindingInformation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1416 NAME 'mSMQSiteNameEx' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1426 NAME 'msCOM-UserPartitionSetLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1303 NAME 'tokenGroupsNoGCAcceptable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.596 NAME 'msExchHouseIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2233 NAME 'msDS-cloudExtensionAttribute20' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.335 NAME 'currentLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.20 NAME 'homePhone' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1441 NAME 'msDS-Cached-Membership' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.14 NAME 'msDFSR-Schedule' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.622 NAME 'ipsecDataType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.645 NAME 'userCert' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.367 NAME 'rpcNsCodeset' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.223 NAME 'serverName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.950 NAME 'mSMQServices' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2250 NAME 'msDS-DeviceOSVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.332 NAME 'birthLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1440 NAME 'msDs-Schema-Extensions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1348 NAME 'gPCMachineExtensionNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1833 NAME 'msDS-ExternalKey' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.858 NAME 'netbootTools' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1717 NAME 'msDS-AdditionalDnsHostName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.770 NAME 'aCSEnableACSService' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.170 NAME 'systemOnly' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.32 NAME 'domainPolicyObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.766 NAME 'aCSAllocableRSVPBandwidth' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.9 NAME 'helpData32' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1805 NAME 'msDS-AzGenerateAudits' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.276 NAME 'driverVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1317 NAME 'aCSMinimumDelayVariation' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.302 NAME 'sAMAccountType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.610 NAME 'employeeNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.30 NAME 'attributeID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.3.6.1.4.1.1466.101.119.3 NAME 'entryTTL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1843 NAME 'msDRM-IdentityCertificate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.6.13.3.103 NAME 'msDFSR-ComputerReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1989 NAME 'msTSWorkDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1674 NAME 'msPKI-Certificate-Application-Policy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.716 NAME 'mscopeId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.514 NAME 'physicalLocationObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.570 NAME 'meetingProtocol' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.2.370 NAME 'objectClassCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.15 NAME 'msDFSR-Keywords' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.812 NAME 'createWizardExt' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.61 NAME 'lockOutObservationWindow' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.750 NAME 'groupType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1459 NAME 'msDS-Behavior-Version' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.937 NAME 'mSMQSignKey' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.913 NAME 'allowedAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.120 NAME 'uSNChanged' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.340 NAME 'rightsGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.277 NAME 'otherHomePhone' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1309 NAME 'mSMQInterval2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1439 NAME 'msPKI-Certificate-Policy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1308 NAME 'mSMQInterval1' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1383 NAME 'mS-SQL-ConnectionURL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2176 NAME 'msKds-Version' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.859 NAME 'netbootLocallyInstalledOSes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.967 NAME 'mSMQSignCertificatesMig' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2232 NAME 'msDS-cloudExtensionAttribute19' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2231 NAME 'msDS-cloudExtensionAttribute18' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2230 NAME 'msDS-cloudExtensionAttribute17' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2229 NAME 'msDS-cloudExtensionAttribute16' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2228 NAME 'msDS-cloudExtensionAttribute15' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2227 NAME 'msDS-cloudExtensionAttribute14' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2226 NAME 'msDS-cloudExtensionAttribute13' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2225 NAME 'msDS-cloudExtensionAttribute12' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2142 NAME 'msDNS-SecureDelegationPollingPeriod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2224 NAME 'msDS-cloudExtensionAttribute11' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.718 NAME 'dhcpProperties' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2223 NAME 'msDS-cloudExtensionAttribute10' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.157 NAME 'serverRole' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1394 NAME 'mS-SQL-AllowAnonymousSubscription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.563 NAME 'shellPropertyPages' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1315 NAME 'aCSMinimumPolicedSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.273 NAME 'printStatus' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.644 NAME 'showInAddressBook' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.626 NAME 'ipsecISAKMPReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1925 NAME 'msDS-hasFullReplicaNCs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.940 NAME 'mSMQCSPName' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.30 NAME 'msDFSR-MinDurationCacheInMin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.243 NAME 'printColor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2193 NAME 'msDS-TDOIngressBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.3.6.1.1.1.1.1 NAME 'gidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1993 NAME 'msTSExpireDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 2.5.4.2 NAME 'knowledgeInformation' SYNTAX '1.2.840.113556.1.4.905' )",
"( 1.2.840.113556.1.4.908 NAME 'extendedClassInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.953 NAME 'mSMQSiteID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2040 NAME 'msDFS-LinkSecurityDescriptorv2' SYNTAX '1.2.840.113556.1.4.907' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1343 NAME 'dSUIAdminNotification' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1700 NAME 'msTAPI-ConferenceBlob' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.486 NAME 'fRSWorkingPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.62 NAME 'scriptPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1810 NAME 'msDS-TasksForAzTask' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.6.13.3.31 NAME 'msDFSR-MaxAgeInCacheInMin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.19 NAME 'cOMClassID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.108 NAME 'remoteSourceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.704 NAME 'dhcpServers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.876 NAME 'fRSMemberReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2261 NAME 'msDS-DeviceLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.82 NAME 'moniker' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.289 NAME 'printMediaReady' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.3.6.1.1.1.1.17 NAME 'ipProtocolNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1209 NAME 'shortServerName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.910 NAME 'fromEntry' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.636 NAME 'privilegeAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2025 NAME 'msDS-IsUserCachableAtRodc' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1715 NAME 'msDS-SPNSuffixes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.562 NAME 'adminPropertyPages' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 0.9.2342.19200300.100.1.10 NAME 'manager' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 2.5.4.49 NAME 'distinguishedName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1356 NAME 'validAccesses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2053 NAME 'msImaging-PSPIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.459 NAME 'machineWidePolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1403 NAME 'mS-SQL-AllowKnownPullSubscription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.283 NAME 'assetNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.885 NAME 'terminalServer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2012 NAME 'msDS-MinimumPasswordAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.7 NAME 'msDFSR-ConflictPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1831 NAME 'msDS-ByteArray' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.135 NAME 'trustAuthOutgoing' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2258 NAME 'msDS-RegisteredOwner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.608 NAME 'queryPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.109 NAME 'replicaSource' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2296 NAME 'msDS-AssignedAuthNPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.402 NAME 'helpData16' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.232 NAME 'defaultPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1388 NAME 'mS-SQL-Version' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.364 NAME 'operatingSystemVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2284 NAME 'msDS-ServiceTGTLifetime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1460 NAME 'msDS-User-Account-Control-Computed' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.75 NAME 'maxRenewAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.285 NAME 'printRate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.911 NAME 'allowedChildClasses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.615 NAME 'personalTitle' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1225 NAME 'mSMQPrevSiteGates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.2131 NAME 'msDNS-SignWithNSEC3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2007 NAME 'msTSLicenseVersion4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.13 NAME 'documentVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2004 NAME 'msTSLicenseVersion3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2001 NAME 'msTSLicenseVersion2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.507 NAME 'volumeCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.137 NAME 'uNCName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2022 NAME 'msDS-ResultantPSO' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.168 NAME 'modifiedCount' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1809 NAME 'msDS-OperationsForAzTaskBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.328 NAME 'versionNumberHi' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2099 NAME 'msDS-ClaimAttributeSource' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.754 NAME 'rpcNsEntryFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.778 NAME 'aCSDSBMDeadTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.917 NAME 'mSMQQueueType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.326 NAME 'packageName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.422 NAME 'domainPolicyReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2241 NAME 'msDS-RegistrationQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.3 NAME 'msDFSR-RootPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1817 NAME 'msDS-AzApplicationVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.436 NAME 'directReports' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.240 NAME 'printOrientationsSupported' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.574 NAME 'meetingLanguage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.43 NAME 'fRSVersionGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 2.5.4.30 NAME 'supportedApplicationContext' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.26 NAME 'rDNAttID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1409 NAME 'masteredBy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.752 NAME 'userSharedFolderOther' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2199 NAME 'msDS-ManagedPasswordInterval' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1932 NAME 'msDS-IsFullReplicaFor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.22 NAME 'msDFSR-RootFence' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.789 NAME 'transportDLLName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.499 NAME 'contextMenu' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.202 NAME 'auditingPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.11 NAME 'msDFSR-TombstoneExpiryInMin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1364 NAME 'mS-SQL-RegisteredOwner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.8 NAME 'userClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.775 NAME 'aCSMaxSizeOfRSVPLogFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.144 NAME 'operatorCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1238 NAME 'mSMQDsService' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1984 NAME 'msTSReconnectionAction' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2286 NAME 'msDS-AssignedAuthNPolicySiloBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2180 NAME 'msImaging-ThumbprintHash' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.68 NAME 'machineArchitecture' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.1311 NAME 'printDuplexSupported' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1795 NAME 'msDS-AzDomainTimeout' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1992 NAME 'msTSProperty02' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.583 NAME 'meetingURL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1991 NAME 'msTSProperty01' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.635 NAME 'privilegeValue' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2100 NAME 'msDS-ClaimTypeAppliesToClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.115 NAME 'invocationId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2288 NAME 'msDS-AuthNPolicySiloMembersBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1681 NAME 'msWMI-intFlags4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1680 NAME 'msWMI-intFlags3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1679 NAME 'msWMI-intFlags2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1678 NAME 'msWMI-intFlags1' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.100 NAME 'msDFSR-MemberReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.100 NAME 'priorValue' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1379 NAME 'mS-SQL-Vines' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1377 NAME 'mS-SQL-TCPIP' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2172 NAME 'msKds-SecretAgreementParam' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2222 NAME 'msDS-cloudExtensionAttribute9' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2221 NAME 'msDS-cloudExtensionAttribute8' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2220 NAME 'msDS-cloudExtensionAttribute7' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2219 NAME 'msDS-cloudExtensionAttribute6' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2218 NAME 'msDS-cloudExtensionAttribute5' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.661 NAME 'isDefunct' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2217 NAME 'msDS-cloudExtensionAttribute4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.653 NAME 'managedBy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2216 NAME 'msDS-cloudExtensionAttribute3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2215 NAME 'msDS-cloudExtensionAttribute2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2214 NAME 'msDS-cloudExtensionAttribute1' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.588 NAME 'meetingEndTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.53' )",
"( 1.2.840.113556.1.4.498 NAME 'creationWizard' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1915 NAME 'msRADIUS-FramedIpv6Prefix' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.12 NAME 'msDFSR-FileFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.24 NAME 'x121Address' SYNTAX '1.3.6.1.4.1.1466.115.121.1.36' )",
"( 1.2.840.113556.1.4.637 NAME 'privilegeHolder' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.214 NAME 'originalDisplayTableMSDOS' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.211 NAME 'schedule' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1228 NAME 'mSMQDsServices' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.64 NAME 'logonHours' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.883 NAME 'msRRASVendorAttributeEntry' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.58 NAME 'localeID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.97 NAME 'preferredOU' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2033 NAME 'msDFS-NamespaceIdentityGUIDv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1334 NAME 'pKIDefaultCSPs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1411 NAME 'ms-DS-MachineAccountQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.891 NAME 'gPLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.617 NAME 'homePostalAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.320 NAME 'implementedCategories' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.19 NAME 'uSNCreated' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.651 NAME 'otherMailbox' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.345 NAME 'msSFU30NSMAPFieldPosition' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.618 NAME 'wellKnownObjects' SYNTAX '1.2.840.113556.1.4.903' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2160 NAME 'msDS-ClaimIsSingleValued' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.1 NAME 'msDFSR-Version' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.874 NAME 'fRSFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1713 NAME 'MSMQ-SecuredSource' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.825 NAME 'enrollmentProviders' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.666 NAME 'syncAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.665 NAME 'syncMembership' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.48 NAME 'keywords' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2169 NAME 'msKds-KDFAlgorithmID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.370 NAME 'rIDAvailablePool' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.214 NAME 'nextLevelStore' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1145 NAME 'msRADIUSCallbackNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.303 NAME 'msSFU30IntraFieldSeparator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.346 NAME 'desktopProfile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.20 NAME 'cOMInterfaceID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.279 NAME 'printMinXExtent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1213 NAME 'assocNTAccount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.671 NAME 'msiFileList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2032 NAME 'msDFS-GenerationGUIDv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2279 NAME 'msDS-UserTGTLifetime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.10 NAME 'msDFSR-ReplicationGroupType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1790 NAME 'msDS-PerUserTrustTombstonesQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1124 NAME 'msNPCallingStationID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 0.9.2342.19200300.100.1.2 NAME 'textEncodedORAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.487 NAME 'fRSRootPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1807 NAME 'msDS-MembersForAzRoleBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1823 NAME 'msieee80211-ID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.791 NAME 'transportType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.674 NAME 'rootTrust' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1641 NAME 'msWMI-PropertyName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.25 NAME 'mayContain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' )",
"( 1.2.840.113556.1.4.1438 NAME 'msPKI-RA-Policies' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.769 NAME 'aCSEventLogLevel' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.0 NAME 'uidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.9 NAME 'shadowInactive' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.945 NAME 'mSMQSiteGates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.25 NAME 'internationalISDNNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.36' )",
"( 1.2.840.113556.1.4.1979 NAME 'msTSAllowLogon' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.274 NAME 'printSpooling' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.242 NAME 'printCollate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1345 NAME 'dSUIShellMaximum' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.693 NAME 'pendingCACertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2257 NAME 'msDS-DeviceObjectVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.852 NAME 'netbootCurrentClientCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.534 NAME 'fRSLevelLimit' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1685 NAME 'msWMI-Parm4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1684 NAME 'msWMI-Parm3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1988 NAME 'msTSDefaultToMainPrinter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1913 NAME 'msRADIUS-FramedInterfaceId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.353 NAME 'displayNamePrintable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1696 NAME 'lastLogonTimestamp' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1683 NAME 'msWMI-Parm2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.330 NAME 'lastUpdateSequence' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.696 NAME 'currentParentCA' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.689 NAME 'cRLObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1682 NAME 'msWMI-Parm1' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.22 NAME 'governsID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1632 NAME 'msWMI-Int8Default' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.169 NAME 'logonCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.772 NAME 'aCSPolicyName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.38 NAME 'authorityRevocationList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1212 NAME 'isEphemeral' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.324 NAME 'packageType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1435 NAME 'msPKI-Template-Minor-Revision' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2010 NAME 'msTSLSProperty02' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1961 NAME 'msDS-SiteName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2009 NAME 'msTSLSProperty01' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1336 NAME 'replInterval' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2066 NAME 'msDS-RequiredDomainBehaviorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2185 NAME 'msDS-GeoCoordinatesLongitude' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2182 NAME 'msDS-AllowedToActOnBehalfOfOtherIdentity' SYNTAX '1.2.840.113556.1.4.907' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.3.6.1.1.1.1.11 NAME 'shadowFlag' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.8 NAME 'msDFSR-ConflictSizeInMb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.357 NAME 'nTMixedDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2191 NAME 'msDS-IngressClaimsTransformationPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1892 NAME 'msPKIRoamingTimeStamp' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2174 NAME 'msKds-PrivateKeyLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.324 NAME 'addressEntryDisplayTable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.218 NAME 'applicationName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1318 NAME 'aCSNonReservedPeakRate' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2023 NAME 'msDS-PasswordSettingsPrecedence' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.99 NAME 'priorSetTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.914 NAME 'allowedAttributesEffective' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.271 NAME 'printOwner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1996 NAME 'msDS-UserPasswordExpiryTimeComputed' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.930 NAME 'mSMQServiceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1780 NAME 'hideFromAB' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.578 NAME 'meetingContactInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2020 NAME 'msDS-PSOAppliesTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1944 NAME 'msDS-PhoneticDepartment' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1792 NAME 'msDS-AzLDAPQuery' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.251 NAME 'cOMTreatAsClassId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.14 NAME 'builtinModifiedCount' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.7 NAME 'shadowMax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.325 NAME 'setupCommand' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1647 NAME 'msWMI-TargetObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.420 NAME 'publicKeyPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1714 NAME 'MSMQ-MulticastAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1677 NAME 'msWMI-Genus' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2281 NAME 'msDS-ComputerTGTLifetime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1671 NAME 'msPKI-OID-Attribute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.36 NAME 'dMDLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.810 NAME 'createDialog' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2140 NAME 'msDNS-DSRecordSetTTL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1358 NAME 'schemaInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1624 NAME 'msWMI-ChangeDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1975 NAME 'msDS-RevealedListBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1962 NAME 'msDS-PromotionSettings' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.229 NAME 'driverName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.378 NAME 'dnsAllowDynamic' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1246 NAME 'interSiteTopologyGenerator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.817 NAME 'localizedDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2235 NAME 'msDS-ReplValueMetaDataExt' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1933 NAME 'msDS-IsDomainFor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2213 NAME 'msDS-RIDPoolAllocationEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.73 NAME 'lockoutThreshold' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.50 NAME 'lastContentIndexed' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.824 NAME 'signatureAlgorithms' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.860 NAME 'netbootServer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.40 NAME 'msDFSR-StagingCleanupTriggerInPercent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1695 NAME 'msMQ-Recipient-FormatName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1966 NAME 'msTPM-OwnerInformation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.156 NAME 'comment' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.650 NAME 'mhsORAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.929 NAME 'mSMQInRoutingServers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1787 NAME 'msDS-AllowedToDelegateTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1389 NAME 'mS-SQL-Language' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.18 NAME 'msDFSR-ContentSetGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.8 NAME 'possSuperiors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' )",
"( 1.2.840.113556.1.4.912 NAME 'allowedChildClassesEffective' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2132 NAME 'msDNS-NSEC3OptOut' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.136 NAME 'trustType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1879 NAME 'msDS-SourceObjectDN' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.533 NAME 'fRSReplicaSetGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1434 NAME 'msPKI-Template-Schema-Version' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.241 NAME 'printMaxCopies' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.961 NAME 'mSMQSiteForeign' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' )",
"( 1.2.840.113556.1.4.1808 NAME 'msDS-OperationsForAzTask' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1242 NAME 'dNReferenceUpdate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 0.9.2342.19200300.100.1.5 NAME 'drink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1923 NAME 'msDS-KrbTgtLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1402 NAME 'mS-SQL-Publisher' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2018 NAME 'msDS-LockoutDuration' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.688 NAME 'cAWEBURL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.23 NAME 'bootParameter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.536 NAME 'fRSExtensions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.233 NAME 'printStartTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1788 NAME 'msDS-PerUserTrustQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.159 NAME 'accountExpires' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.14 NAME 'nisNetgroupTriple' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1390 NAME 'mS-SQL-Description' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.224 NAME 'defaultSecurityDescriptor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113549.1.9.2 NAME 'unstructuredName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.695 NAME 'pendingParentCA' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1375 NAME 'mS-SQL-MultiProtocol' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2152 NAME 'msAuthz-LastEffectiveSecurityPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.56 NAME 'localPolicyFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1392 NAME 'mS-SQL-InformationDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2159 NAME 'msDS-ClaimIsValueSpaceRestricted' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.708 NAME 'dhcpSites' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.717 NAME 'dhcpState' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.762 NAME 'aCSServiceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.132 NAME 'trustDirection' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.312 NAME 'rpcNsObjectID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1395 NAME 'mS-SQL-Alias' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.18.2 NAME 'modifyTimeStamp' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2145 NAME 'msDNS-DNSKEYRecords' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.301 NAME 'wbemPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.0 NAME 'objectClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.21 NAME 'msDFSR-DfsPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1429 NAME 'msPKI-RA-Signature' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1699 NAME 'msTAPI-ProtocolId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2086 NAME 'msSPP-PhoneLicense' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.120 NAME 'schemaFlagsEx' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1945 NAME 'msDS-PhoneticCompanyName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.916 NAME 'canonicalName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.702 NAME 'dhcpObjName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2238 NAME 'msds-memberTransitive' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.133 NAME 'trustPartner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.927 NAME 'mSMQSites' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.867 NAME 'altSecurityIdentities' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.615 NAME 'shellContextMenu' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.866 NAME 'pekKeyChangeInterval' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2046 NAME 'addressBookRoots2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.27 NAME 'currentValue' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.166 NAME 'groupMembershipSAM' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1926 NAME 'msDS-NeverRevealGroup' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.6.13.3.28 NAME 'msDFSR-ReadOnly' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1314 NAME 'aCSMaximumSDUSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.457 NAME 'localPolicyReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1189 NAME 'msRASSavedCallbackNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1918 NAME 'msRADIUS-SavedFramedIpv6Route' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 2.5.21.2 NAME 'dITContentRules' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.895 NAME 'transportAddressAttribute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1418 NAME 'tokenGroupsGlobalAndUniversal' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.850 NAME 'netbootLimitClients' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.944 NAME 'mSMQSite2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.943 NAME 'mSMQSite1' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1664 NAME 'msDS-Replication-Notify-Subsequent-DSA-Delay' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.537 NAME 'dynamicLDAPServer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2249 NAME 'msDS-DeviceOSType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.35 NAME 'employeeID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2137 NAME 'msDNS-NSEC3RandomSaltLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2151 NAME 'msAuthz-ProposedSecurityPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.267 NAME 'uSNDSALastObjRemoved' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.963 NAME 'mSMQQueueJournalQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.607 NAME 'queryPolicyObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1978 NAME 'msTSHomeDrive' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.593 NAME 'msExchLabeledURI' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1329 NAME 'pKIMaxIssuingDepth' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2061 NAME 'msDS-EnabledFeature' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.278 NAME 'printMaxYExtent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.16 NAME 'codePage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1802 NAME 'msDS-AzBizRuleLanguage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.363 NAME 'operatingSystem' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.761 NAME 'aCSMaxDurationPerFlow' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.921 NAME 'mSMQJournalQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2106 NAME 'msSPP-CSVLKPartialProductKey' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1190 NAME 'msRASSavedFramedIPAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2171 NAME 'msKds-SecretAgreementAlgorithmID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.703 NAME 'dhcpObjDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.890 NAME 'uPNSuffixes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1720 NAME 'msDS-ReplicationEpoch' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.24 NAME 'bootFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.614 NAME 'adminContextMenu' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.2.231 NAME 'oMSyntax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.8 NAME 'userAccountControl' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.621 NAME 'ipsecID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.511 NAME 'flatName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.784 NAME 'aCSIdentityName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.15 NAME 'msiScriptPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.125 NAME 'supplementalCredentials' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2287 NAME 'msDS-AuthNPolicySiloMembers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.199 NAME 'serviceInstanceVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1347 NAME 'sPNMappings' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.933 NAME 'mSMQComputerType' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.780 NAME 'aCSNonReservedTxLimit' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1227 NAME 'mSMQRoutingServices' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2074 NAME 'msTSPrimaryDesktopBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 2.5.18.1 NAME 'createTimeStamp' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.3.6.1.1.1.1.19 NAME 'ipHostNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1130 NAME 'msNPSavedCallingStationID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.700 NAME 'dhcpFlags' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.629 NAME 'ipsecFilterReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.40 NAME 'fromServer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.568 NAME 'meetingKeyword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2178 NAME 'msKds-UseStartTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1321 NAME 'aCSNonReservedMinPolicedSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.246 NAME 'printLanguage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.2.54 NAME 'tombstoneLifetime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.765 NAME 'aCSPermissionBits' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.8 NAME 'shadowWarning' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1398 NAME 'mS-SQL-LastBackupDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2 NAME 'objectGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.146 NAME 'company' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1710 NAME 'msDS-AllowedDNSSuffixes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1841 NAME 'msDS-ObjectReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 2.5.4.8 NAME 'st' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.341 NAME 'msSFU30YpServers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 2.5.4.4 NAME 'sn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.707 NAME 'dhcpRanges' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.282 NAME 'printMemory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.924 NAME 'mSMQPrivacyLevel' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.333 NAME 'oMTIndxGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.205 NAME 'pKTGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2251 NAME 'msDS-DevicePhysicalIDs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1410 NAME 'mS-DS-CreatorSID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.345 NAME 'groupPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2030 NAME 'msDFS-SchemaMajorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.513 NAME 'siteObjectBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.87 NAME 'nETBIOSName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2175 NAME 'msKds-RootKeyData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.24 NAME 'mustContain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' )",
"( 2.5.4.51 NAME 'houseIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.3.6.1.1.1.1.26 NAME 'nisMapName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1917 NAME 'msRADIUS-FramedIpv6Route' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.6.18.1.307 NAME 'msSFU30MasterServerName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.98 NAME 'primaryGroupID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1837 NAME 'msDs-masteredBy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.200 NAME 'controlAccessRights' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1158 NAME 'msRADIUSFramedRoute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.107 NAME 'remoteSource' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1971 NAME 'msDS-LastFailedInteractiveLogonTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1958 NAME 'msDS-AuthenticatedAtDC' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.5 NAME 'serialNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.509 NAME 'serviceClassName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2050 NAME 'msPKI-CredentialRoamingTokens' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.4.2008 NAME 'msTSManagingLS4' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2005 NAME 'msTSManagingLS3' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2002 NAME 'msTSManagingLS2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1363 NAME 'mS-SQL-Name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2108 NAME 'msTPM-OwnerInformationTemp' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.886 NAME 'purportedSearch' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1384 NAME 'mS-SQL-PublicationURL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2271 NAME 'msDS-CloudIsManaged' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.41 NAME 'generatedConnection' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.864 NAME 'netbootSCPBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1396 NAME 'mS-SQL-Size' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.115 NAME 'rpcNsInterfaceID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.56 NAME 'documentPublisher' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.619 NAME 'dNSHostName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2273 NAME 'msDS-CloudAnchor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.939 NAME 'mSMQNameStyle' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.882 NAME 'fRSVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.777 NAME 'aCSDSBMRefresh' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.80 NAME 'minTicketAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1310 NAME 'mSMQSiteGatesMig' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.83 NAME 'monikerDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2150 NAME 'msAuthz-EffectiveSecurityPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.15 NAME 'hasPartialReplicaNCs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2194 NAME 'msDS-TDOEgressBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1811 NAME 'msDS-TasksForAzTaskBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1987 NAME 'msTSConnectPrinterDrives' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1953 NAME 'ms-net-ieee-80211-GP-PolicyReserved' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1313 NAME 'aCSMaxTokenBucketPerFlow' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.579 NAME 'meetingOwner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.12 NAME 'badPwdCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.39 NAME 'forceLogoff' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.326 NAME 'perRecipDialogDisplayTable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.51 NAME 'lastLogoff' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1796 NAME 'msDS-AzScriptEngineCacheMax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2269 NAME 'msDS-IssuerPublicCertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1639 NAME 'msWMI-Name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.4 NAME 'replUpToDateVector' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.470 NAME 'trustAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.515 NAME 'serverReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.308 NAME 'msSFU30OrderNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1669 NAME 'msDS-Approx-Immed-Subordinates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2141 NAME 'msDNS-SignatureInceptionOffset' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2186 NAME 'msDS-IsPossibleValuesPresent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.78 NAME 'minPwdAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.339 NAME 'msSFU30NisDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1701 NAME 'msTAPI-IpAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.249 NAME 'cOMCLSID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.774 NAME 'aCSMaxNoOfLogFiles' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.494 NAME 'siteServer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.849 NAME 'netbootAllowNewClients' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1931 NAME 'msDS-KrbTgtLinkBl' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1789 NAME 'msDS-AllUsersTrustQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2156 NAME 'msAuthz-MemberRulesInCentralAccessPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.721 NAME 'ipPhone' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.613 NAME 'employeeType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1634 NAME 'msWMI-Int8Min' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2248 NAME 'msDS-IsEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1950 NAME 'msDS-AzGenericData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1646 NAME 'msWMI-TargetNameSpace' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.816 NAME 'fileExtPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.712 NAME 'optionDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.321 NAME 'requiredCategories' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.255 NAME 'addressSyntax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2110 NAME 'msTPM-TpmInformationForComputerBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1785 NAME 'msIIS-FTPRoot' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.518 NAME 'defaultHidingValue' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.946 NAME 'mSMQCost' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 2.5.4.44 NAME 'generationQualifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.681 NAME 'indexedScopes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.3.6.1.1.1.1.27 NAME 'nisMapEntry' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1865 NAME 'msDS-PrincipalName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2073 NAME 'msTSPrimaryDesktop' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.697 NAME 'cACertificateDN' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1354 NAME 'scopeFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1942 NAME 'msDS-PhoneticFirstName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.21 NAME 'ipNetmaskNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1997 NAME 'msDS-HABSeniorityIndex' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1307 NAME 'accountNameHistory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.893 NAME 'gPCFunctionalityVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2203 NAME 'msDS-parentdistname' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1687 NAME 'extraColumns' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1834 NAME 'msDS-ExternalStore' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1983 NAME 'msTSMaxIdleTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.687 NAME 'cAConnect' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2048 NAME 'templateRoots2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.154 NAME 'serverState' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1224 NAME 'parentGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.926 NAME 'mSMQTransactional' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.925 NAME 'mSMQOwnerID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2035 NAME 'msDFS-Ttlv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.150 NAME 'adminCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2041 NAME 'msDFS-LinkIdentityGUIDv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.490 NAME 'fRSDSPoll' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2015 NAME 'msDS-PasswordComplexityEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.105 NAME 'remoteServerName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.531 NAME 'nonSecurityMemberBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 2.16.840.1.113730.3.1.36 NAME 'thumbnailLogo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.586 NAME 'meetingRecurrence' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1643 NAME 'msWMI-QueryLanguage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.962 NAME 'mSMQQueueQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1415 NAME 'mSMQLabelEx' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.16 NAME 'nCName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2170 NAME 'msKds-KDFParam' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.567 NAME 'meetingDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1704 NAME 'msDS-NCReplCursors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.23 NAME 'facsimileTelephoneNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.851 NAME 'netbootMaxClients' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2016 NAME 'msDS-PasswordReversibleEncryptionEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1635 NAME 'msWMI-Int8ValidValues' SYNTAX '1.2.840.113556.1.4.906' )",
"( 1.2.840.113556.1.4.719 NAME 'dhcpMaxKey' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1835 NAME 'msDS-Integer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.1208 NAME 'aNR' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1393 NAME 'mS-SQL-Database' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.42 NAME 'pager' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1914 NAME 'msRADIUS-SavedFramedInterfaceId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1391 NAME 'mS-SQL-Type' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.566 NAME 'meetingName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.123 NAME 'serviceClassInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.26 NAME 'creationTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.103 NAME 'proxyLifetime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.660 NAME 'treeName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.892 NAME 'gPOptions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.923 NAME 'mSMQAuthenticate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1432 NAME 'msPKI-Certificate-Name-Flag' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.206 NAME 'pKT' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.287 NAME 'printNetworkAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1431 NAME 'msPKI-Private-Key-Flag' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1346 NAME 'templateRoots' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.657 NAME 'serviceDNSName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.868 NAME 'isCriticalSystemObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.301 NAME 'garbageCollPeriod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.288 NAME 'printMACAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1304 NAME 'sDRightsEffective' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.380 NAME 'extendedCharsAllowed' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.86 NAME 'userWorkstations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1360 NAME 'mS-DS-ConsistencyGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1712 NAME 'msPKI-OIDLocalizedName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.21.5 NAME 'attributeTypes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.31 NAME 'fRSReplicaSetType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.4.1.250.1.57 NAME 'labeledURI' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.341 NAME 'appliesTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.11 NAME 'ou' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2293 NAME 'msDS-ServiceAuthNPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.346 NAME 'msSFU30PosixMember' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1973 NAME 'msDS-FailedInteractiveLogonCountAtLastSuccessfulLogon' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 2.5.18.10 NAME 'subSchemaSubEntry' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2262 NAME 'msDS-ApproximateLastLogonTimeStamp' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.222 NAME 'location' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.854 NAME 'netbootAnswerOnlyValidClients' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1949 NAME 'msDS-AzObjectGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 2.16.840.1.113730.3.1.34 NAME 'middleName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2158 NAME 'msDS-ClaimSourceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.706 NAME 'dhcpMask' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.2109 NAME 'msTPM-TpmInformationForComputer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.623 NAME 'ipsecData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1171 NAME 'msRADIUSServiceType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.705 NAME 'dhcpSubnets' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.1999 NAME 'msFVE-KeyPackage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1968 NAME 'msDS-NC-RO-Replica-Locations-BL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.36 NAME 'enabledConnection' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.472 NAME 'domainCrossRef' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.52 NAME 'lastLogon' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.28 NAME 'dnsRoot' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.966 NAME 'mSMQDigestsMig' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.878 NAME 'fRSPrimaryMember' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1848 NAME 'msDS-QuotaEffective' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1372 NAME 'mS-SQL-UnicodeSortOrder' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.639 NAME 'isMemberOfPartialAttributeSet' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.464 NAME 'wWWHomePage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.711 NAME 'superScopeDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1631 NAME 'msWMI-IntValidValues' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.3.6.1.1.1.1.2 NAME 'gecos' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2063 NAME 'msDS-OptionalFeatureFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.38 NAME 'flags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1240 NAME 'netbootSIFFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.690 NAME 'cAUsages' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2104 NAME 'msDS-MembersOfResourcePropertyListBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.66 NAME 'lSACreationTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.709 NAME 'dhcpReservations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.4.934 NAME 'mSMQForeign' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1963 NAME 'msDS-SupportedEncryptionTypes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1401 NAME 'mS-SQL-Keywords' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1705 NAME 'msDS-NCReplInboundNeighbors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2128 NAME 'msDNS-KeymasterZones' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.19 NAME 'physicalDeliveryOfficeName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1694 NAME 'gPCWQLFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.195 NAME 'systemPossSuperiors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.218 NAME 'oMObjectClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1226 NAME 'mSMQDependentClientServices' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1676 NAME 'msWMI-Class' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2144 NAME 'msDNS-SigningKeys' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1630 NAME 'msWMI-IntMin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.372 NAME 'rIDPreviousAllocationPool' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.848 NAME 'appSchemaVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1981 NAME 'msTSMaxDisconnectionTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1645 NAME 'msWMI-TargetClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.535 NAME 'fRSRootSecurity' SYNTAX '1.2.840.113556.1.4.907' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1423 NAME 'msCOM-PartitionLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.32 NAME 'owner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1436 NAME 'msPKI-Cert-Template-OID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1716 NAME 'msDS-IntId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.18.1.309 NAME 'msSFU30Name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.254 NAME 'cOMTypelibId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1642 NAME 'msWMI-Query' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.155 NAME 'uASCompat' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1623 NAME 'msWMI-Author' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1964 NAME 'msFVE-RecoveryPassword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.37 NAME 'associatedDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.764 NAME 'aCSPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.571 NAME 'meetingType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.783 NAME 'defaultObjectCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1365 NAME 'mS-SQL-Contact' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.679 NAME 'creator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.39 NAME 'certificateRevocationList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.699 NAME 'dhcpType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1405 NAME 'mS-SQL-AllowQueuedUpdatingSubscription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.915 NAME 'possibleInferiors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2234 NAME 'netbootDUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.899 NAME 'aCSEnableRSVPAccounting' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.881 NAME 'fRSTimeLastConfigChange' SYNTAX '1.3.6.1.4.1.1466.115.121.1.53' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.55 NAME 'audio' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.3.6.1.1.1.1.13 NAME 'memberNisNetgroup' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.898 NAME 'aCSNonReservedTxSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.771 NAME 'servicePrincipalName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1820 NAME 'msDS-HasDomainNCs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2070 NAME 'msTSEndpointData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.620 NAME 'ipsecName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.458 NAME 'qualityOfService' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2042 NAME 'msDFS-ShortNameLinkPathv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1688 NAME 'msDS-Security-Group-Extra-Classes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2289 NAME 'msDS-UserAuthNPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.83 NAME 'repsTo' SYNTAX 'OctetString' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1357 NAME 'dSCorePropagationData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2196 NAME 'msDS-ManagedPassword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.647 NAME 'otherMobile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2072 NAME 'msTSEndpointPlugin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.749 NAME 'url' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.701 NAME 'dhcpIdentification' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.122 NAME 'serviceClassID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2058 NAME 'isRecycled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.213 NAME 'defaultClassStore' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.2252 NAME 'msDS-DeviceID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.633 NAME 'policyReplicationFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1693 NAME 'msFRS-Hub-Member' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1328 NAME 'pKIKeyUsage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.459 NAME 'networkAddress' SYNTAX '1.2.840.113556.1.4.905' )",
"( 1.2.840.113556.1.4.1786 NAME 'msIIS-FTPDir' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.806 NAME 'treatAsLeaf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.820 NAME 'bridgeheadServerListBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 0.9.2342.19200300.100.1.15 NAME 'documentLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.13.3.36 NAME 'msDFSR-OnDemandExclusionDirectoryFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.640 NAME 'partialAttributeSet' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.853 NAME 'netbootAnswerRequests' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 2.5.4.31 NAME 'member' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.6.18.1.323 NAME 'msSFU30Aliases' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1243 NAME 'mSMQQueueNameExt' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1370 NAME 'mS-SQL-CharacterSet' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1622 NAME 'msDS-Entry-Time-To-Die' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.460 NAME 'lDAPDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2031 NAME 'msDFS-SchemaMinorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.12 NAME 'memberUid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1800 NAME 'msDS-AzOperationID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.322 NAME 'categoryId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.60 NAME 'lockoutDuration' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.870 NAME 'frsComputerReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 2.5.4.45 NAME 'x500uniqueIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.6.13.3.25 NAME 'msDFSR-Priority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.847 NAME 'installUiLevel' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1842 NAME 'msDs-MaxValues' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 2.5.4.9 NAME 'street' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2154 NAME 'msAuthz-CentralAccessPolicyID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.3 NAME 'whenChanged' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1433 NAME 'msPKI-Minimal-Key-Size' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1814 NAME 'msDS-TasksForAzRole' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.6.13.3.101 NAME 'msDFSR-ComputerReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.580 NAME 'meetingIP' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.539 NAME 'initialAuthIncoming' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.356 NAME 'foreignIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.565 NAME 'meetingID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.3 NAME 'unixHomeDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1721 NAME 'msDS-UpdateScript' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.557 NAME 'parentCA' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.255 NAME 'vendor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.900 NAME 'aCSRSVPAccountFilesLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1320 NAME 'aCSNonReservedMaxSDUSize' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1956 NAME 'ms-net-ieee-8023-GP-PolicyReserved' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.773 NAME 'aCSRSVPLogFilesLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.951 NAME 'mSMQQMID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.102 NAME 'memberOf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1397 NAME 'mS-SQL-CreationDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2146 NAME 'msDNS-ParentHasSecureDelegation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.113 NAME 'rpcNsBindings' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.656 NAME 'userPrincipalName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1934 NAME 'msDS-IsPartialReplicaFor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2167 NAME 'msDS-PrimaryComputer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.469 NAME 'USNIntersite' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1803 NAME 'msDS-AzLastImportedBizRulePath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2295 NAME 'msDS-AssignedAuthNPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 2.5.4.13 NAME 'description' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.922 NAME 'mSMQLabel' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2024 NAME 'msDS-NcType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2011 NAME 'msDS-MaximumPasswordAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2291 NAME 'msDS-ComputerAuthNPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1832 NAME 'msDS-DateTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' )",
"( 1.2.840.113556.1.2.281 NAME 'nTSecurityDescriptor' SYNTAX '1.2.840.113556.1.4.907' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.722 NAME 'otherIpPhone' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1368 NAME 'mS-SQL-Build' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.219 NAME 'iconPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1417 NAME 'mSMQComputerTypeEx' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.38 NAME 'associatedName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1986 NAME 'msTSConnectClientDrives' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2285 NAME 'msDS-AssignedAuthNPolicySilo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1638 NAME 'msWMI-Mof' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.314 NAME 'rpcNsTransferSyntax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1702 NAME 'msDS-TrustForestTrustInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.557 NAME 'Enabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.21 NAME 'subClassOf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 0.9.2342.19200300.100.1.44 NAME 'uniqueIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1845 NAME 'msDS-QuotaAmount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1119 NAME 'msNPAllowDialin' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.33 NAME 'isSingleValued' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.782 NAME 'objectCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2177 NAME 'msKds-DomainID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2195 NAME 'msDS-AppliesToResourceTypes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.152 NAME 'groupAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.334 NAME 'volTableIdxGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.272 NAME 'printNotify' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.334 NAME 'searchFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2298 NAME 'msDS-AuthNPolicySiloEnforced' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1428 NAME 'msCOM-ObjectId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.400 NAME 'addressEntryDisplayTableMSDOS' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.81 NAME 'modifiedCountAtLastProm' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.71 NAME 'machineRole' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1123 NAME 'msNPCalledStationID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.654 NAME 'managedObjects' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.638 NAME 'isPrivilegeHolder' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.197 NAME 'systemMustContain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.91 NAME 'otherLoginWorkstations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.13.3.32 NAME 'msDFSR-DisablePacketPrivacy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2297 NAME 'msDS-AuthNPolicyEnforced' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.576 NAME 'meetingMaxParticipants' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.4 NAME 'loginShell' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.779 NAME 'aCSCacheTimeout' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.751 NAME 'userSharedFolder' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.342 NAME 'msSFU30MaxGidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1380 NAME 'mS-SQL-Status' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.13 NAME 'builtinCreationTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.277 NAME 'printMaxXExtent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.230 NAME 'printSeparatorFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1387 NAME 'mS-SQL-GPSHeight' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2054 NAME 'msImaging-PSPString' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.235 NAME 'printFormName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.20 NAME 'telephoneNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1621 NAME 'msDS-Other-Settings' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.304 NAME 'msSFU30SearchAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.21.9 NAME 'structuralObjectClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' )",
"( 1.2.840.113556.1.4.659 NAME 'serviceDNSNameType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.902 NAME 'aCSMaxSizeOfRSVPAccountFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.569 NAME 'meetingLocation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.261 NAME 'division' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1640 NAME 'msWMI-NormalizedClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.300 NAME 'printerName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1960 NAME 'msDS-isRODC' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.268 NAME 'eFSPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1824 NAME 'msDS-AzMajorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2134 NAME 'msDNS-DSRecordAlgorithms' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.270 NAME 'printShareName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1400 NAME 'mS-SQL-Applications' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1312 NAME 'aCSServerList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1376 NAME 'mS-SQL-SPX' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.368 NAME 'rIDManagerReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1371 NAME 'mS-SQL-SortOrder' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.118 NAME 'otherPager' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1894 NAME 'msPKIAccountCredentials' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.6.13.3.16 NAME 'msDFSR-Flags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1301 NAME 'tokenGroups' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1626 NAME 'msWMI-CreationDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.14 NAME 'hasMasterNCs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.153 NAME 'rid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2084 NAME 'msSPP-ConfirmationId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.2 NAME 'msDFSR-Extension' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1846 NAME 'msDS-DefaultQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.35 NAME 'rangeUpper' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1633 NAME 'msWMI-Int8Max' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.48 NAME 'isDeleted' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1327 NAME 'pKIDefaultKeySpec' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1408 NAME 'mS-DS-ReplicatesNCReason' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.4.1816 NAME 'msDS-AzClassId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2088 NAME 'msSPP-IssuanceLicense' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1672 NAME 'msPKI-OID-CPS' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.590 NAME 'meetingBlob' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.72 NAME 'marshalledInterface' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1385 NAME 'mS-SQL-GPSLatitude' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2097 NAME 'msDS-ClaimPossibleValues' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.26 NAME 'msDFSR-DeletedPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1826 NAME 'msDS-RetiredReplNCSignatures' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2143 NAME 'msDNS-SigningKeyDescriptors' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.491 NAME 'fRSFaultCondition' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2278 NAME 'msDS-UserAllowedToAuthenticateFrom' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2017 NAME 'msDS-LockoutObservationWindow' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2130 NAME 'msDNS-IsSigned' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2057 NAME 'msDS-HostServiceAccountBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.683 NAME 'cRLPartitionedRevocationList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.481 NAME 'schemaUpdate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1332 NAME 'pKIOverlapPeriod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.692 NAME 'previousCACertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.573 NAME 'meetingApplication' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1849 NAME 'msDS-QuotaUsed' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.20 NAME 'ipNetworkNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.517 NAME 'ipsecPolicyReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1822 NAME 'msieee80211-DataType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.664 NAME 'syncWithObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2183 NAME 'msDS-GeoCoordinatesAltitude' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.284 NAME 'bytesPerMinute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.139 NAME 'profilePath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.40 NAME 'crossCertificatePair' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1929 NAME 'msDS-SecondaryKrbTgtNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2294 NAME 'msDS-ServiceAuthNPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1414 NAME 'dNSTombstoned' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.104 NAME 'ownerBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1930 NAME 'msDS-RevealedDSAs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2240 NAME 'msDS-IssuerCertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1692 NAME 'msFRS-Topology-Pref' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.158 NAME 'domainReplica' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.2 NAME 'whenCreated' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.76 NAME 'maxStorage' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.484 NAME 'fRSDirectoryFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1916 NAME 'msRADIUS-SavedFramedIpv6Prefix' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2087 NAME 'msSPP-ConfigLicense' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.627 NAME 'ipsecNFAReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.351 NAME 'auxiliaryClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' )",
"( 1.2.840.113556.1.2.50 NAME 'linkID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1718 NAME 'msDS-AdditionalSamAccountName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.35 NAME 'msDFSR-OnDemandExclusionFileFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.577 NAME 'meetingOriginator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.169 NAME 'showInAdvancedViewOnly' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.582 NAME 'meetingAdvertiseScope' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.48 NAME 'buildingName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2181 NAME 'msImaging-HashAlgorithm' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2101 NAME 'msDS-ClaimSharesPossibleValuesWith' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.24 NAME 'contentIndexingAllowed' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.39 NAME 'msDFSR-CommonStagingSizeInMb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2135 NAME 'msDNS-RFC5011KeyRollovers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.682 NAME 'friendlyNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2071 NAME 'msTSEndpointType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2263 NAME 'msDS-RegisteredUsers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2062 NAME 'msDS-OptionalFeatureGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.767 NAME 'aCSMaxPeakBandwidth' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 2.5.4.28 NAME 'preferredDeliveryMethod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.919 NAME 'mSMQQuota' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.327 NAME 'packageFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.382 NAME 'dnsRecord' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.755 NAME 'domainIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.872 NAME 'fRSControlInboundBacklog' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.928 NAME 'mSMQOutRoutingServers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.768 NAME 'aCSEnableRSVPMessageLogging' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.585 NAME 'meetingIsEncrypted' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.34 NAME 'rangeLower' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1361 NAME 'mS-DS-ConsistencyChildCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2192 NAME 'msDS-EgressClaimsTransformationPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2292 NAME 'msDS-ComputerAuthNPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.843 NAME 'lDAPAdminLimits' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1847 NAME 'msDS-TombstoneQuotaFactor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1355 NAME 'queryFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.16 NAME 'postalAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.307 NAME 'options' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.74 NAME 'dSASignature' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.380 NAME 'dnsSecureSecondaries' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.634 NAME 'privilegeDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.598 NAME 'dmdName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1399 NAME 'mS-SQL-LastDiagnosticDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2280 NAME 'msDS-ComputerAllowedToAuthenticateTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.350 NAME 'addressType' SYNTAX '1.2.840.113556.1.4.905' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.38 NAME 'msDFSR-CommonStagingPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.672 NAME 'categories' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1675 NAME 'msPKI-RA-Application-Policies' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1244 NAME 'addressBookRoots' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.336 NAME 'volTableGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.65 NAME 'logonWorkstation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2153 NAME 'msAuthz-ResourceCondition' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.34 NAME 'msDFSR-DefaultCompressionExclusionFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.756 NAME 'aCSTimeOfDay' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2276 NAME 'msDS-SyncServerUrl' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.710 NAME 'superScopes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.44' )",
"( 1.2.840.113556.1.2.210 NAME 'proxyAddresses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.348 NAME 'msSFU30NetgroupHostAtDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1306 NAME 'dNSProperty' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.141 NAME 'department' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.871 NAME 'fRSControlDataCreation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.253 NAME 'cOMOtherProgId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1337 NAME 'mSMQUserSid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 0.9.2342.19200300.100.1.14 NAME 'documentAuthor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.37 NAME 'cACertificate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.698 NAME 'dhcpUniqueKey' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1980 NAME 'msTSRemoteControl' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.9 NAME 'host' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2081 NAME 'msSPP-CSVLKSkuId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.483 NAME 'fRSFileFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2168 NAME 'msDS-IsPrimaryComputerFor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.74 NAME 'maxPwdAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1374 NAME 'mS-SQL-NamedPipe' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1972 NAME 'msDS-FailedInteractiveLogonCount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1649 NAME 'msWMI-TargetType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.668 NAME 'domainCAs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.2021 NAME 'msDS-PSOApplied' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.25 NAME 'countryCode' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.160 NAME 'lmPwdHistory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.275 NAME 'printKeepPrintedJobs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2014 NAME 'msDS-PasswordHistoryLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1836 NAME 'msDS-hasMasterNCs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1813 NAME 'msDS-OperationsForAzRoleBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.212 NAME 'dSHeuristics' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.877 NAME 'fRSPartnerAuthLevel' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.13 NAME 'displayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.269 NAME 'linkTrackSecret' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1239 NAME 'mSMQDependentClientService' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.238 NAME 'printMaxResolutionSupported' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.325 NAME 'perMsgDialogDisplayTable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.819 NAME 'bridgeheadTransportList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.540 NAME 'initialAuthOutgoing' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.523 NAME 'proxyGenerationEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.760 NAME 'aCSAggregateTokenRatePerUser' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.381 NAME 'dnsNotifySecondaries' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 2.5.4.21 NAME 'telexNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.117 NAME 'rpcNsPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.6.18.1.300 NAME 'msSFU30SearchContainer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.960 NAME 'mSMQNt4Stub' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.4.844 NAME 'lDAPIPDenyList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.918 NAME 'mSMQJournal' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.343 NAME 'msSFU30MaxUidNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1442 NAME 'msDS-Cached-Membership-Time-Stamp' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1458 NAME 'msDS-Auxiliary-Classes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.821 NAME 'siteList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1782 NAME 'msDS-KeyVersionNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 2.5.4.50 NAME 'uniqueMember' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1797 NAME 'msDS-AzScriptTimeout' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1812 NAME 'msDS-OperationsForAzRole' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.809 NAME 'remoteStorageGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.231 NAME 'priority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.37 NAME 'msDFSR-Options2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2083 NAME 'msSPP-InstallationId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.58 NAME 'attributeCertificateAttribute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.6.18.1.302 NAME 'msSFU30FieldSeparator' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.532 NAME 'superiorDNSRoot' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.822 NAME 'siteLinkList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1366 NAME 'mS-SQL-Location' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.94 NAME 'ntPwdHistory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1 NAME 'name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1629 NAME 'msWMI-IntMax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.118 NAME 'rpcNsProfileEntry' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2049 NAME 'msDS-BridgeHeadServersUsed' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.4.1969 NAME 'samDomainUpdates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.889 NAME 'additionalTrustedServiceNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.77 NAME 'maxTicketAge' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1661 NAME 'msDS-NC-Replica-Locations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1783 NAME 'msDS-ExecuteScriptPassword' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.49 NAME 'mAPIID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.9 NAME 'msDFSR-Enabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.250 NAME 'cOMUniqueLIBID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.18 NAME 'postOfficeBox' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2067 NAME 'msDS-LastKnownRDN' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1344 NAME 'dSUIAdminMaximum' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1153 NAME 'msRADIUSFramedIPAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1799 NAME 'msDS-AzScopeName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2013 NAME 'msDS-MinimumPasswordLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.114 NAME 'rpcNsGroup' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.146 NAME 'objectSid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.6 NAME 'msDFSR-StagingSizeInMb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.365 NAME 'operatingSystemServicePack' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.21.6 NAME 'objectClasses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1698 NAME 'msTAPI-uid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.256 NAME 'streetAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1191 NAME 'msRASSavedFramedRoute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1965 NAME 'msFVE-RecoveryGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2166 NAME 'msDS-GenerationId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1359 NAME 'otherWellKnownObjects' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.4.1940 NAME 'msDS-RevealedList' SYNTAX '1.2.840.113556.1.4.904' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2282 NAME 'msDS-ServiceAllowedToAuthenticateTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.379 NAME 'dnsAllowXFR' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.628 NAME 'ipsecNegotiationPolicyReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1976 NAME 'msTSProfilePath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2076 NAME 'msPKI-Enrollment-Servers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.53 NAME 'deltaRevocationList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.18 NAME 'otherTelephone' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2077 NAME 'msPKI-Site-Name' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1316 NAME 'aCSMinimumLatency' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2157 NAME 'msDS-ClaimSource' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1970 NAME 'msDS-LastSuccessfulInteractiveLogonTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.280 NAME 'printMinYExtent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.415 NAME 'operatingSystemHotfix' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.306 NAME 'msSFU30MapFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.610 NAME 'classDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1381 NAME 'mS-SQL-LastUpdatedDate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1957 NAME 'msDS-AuthenticatedToAccountlist' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1825 NAME 'msDS-AzMinorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2198 NAME 'msDS-ManagedPasswordPreviousId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2068 NAME 'msDS-DeletedObjectLifetime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2095 NAME 'msDS-IsUsedAsResourceSecurityAttribute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.786 NAME 'mailAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.373 NAME 'rIDUsedPool' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.19 NAME 'msDFSR-RdcEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.44 NAME 'homeDirectory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.538 NAME 'prefixMap' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2034 NAME 'msDFS-LastModifiedv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2155 NAME 'msAuthz-MemberRulesInCentralAccessPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.947 NAME 'mSMQSignCertificates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.714 NAME 'dhcpOptions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2060 NAME 'msDS-LocalEffectiveRecycleTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.675 NAME 'catalogs' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.134 NAME 'trustPosixOffset' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1404 NAME 'mS-SQL-AllowImmediateUpdatingSubscription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2047 NAME 'globalAddressList2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.135 NAME 'cost' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1331 NAME 'pKIExpirationPeriod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.45 NAME 'organizationalStatus' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.5.4.15 NAME 'businessCategory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.13.3.4 NAME 'msDFSR-RootSizeInMb' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.869 NAME 'frsComputerReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1893 NAME 'msPKIDPAPIMasterKeys' SYNTAX '1.2.840.113556.1.4.903' )",
"( 1.2.840.113556.1.4.1430 NAME 'msPKI-Enrollment-Flag' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.45 NAME 'homeDrive' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2085 NAME 'msSPP-OnlineLicense' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.196 NAME 'systemMayContain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.90 NAME 'unicodePwd' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.763 NAME 'aCSTotalNoOfFlows' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1806 NAME 'msDS-MembersForAzRole' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.873 NAME 'fRSControlOutboundBacklog' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.89 NAME 'nTGroupMembers' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.815 NAME 'canUpgradeScript' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.96 NAME 'pwdLastSet' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.228 NAME 'portName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1821 NAME 'msieee80211-Data' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.720 NAME 'dhcpUpdateTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 2.5.4.33 NAME 'roleOccupant' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1818 NAME 'msDS-AzTaskIsRoleDefinition' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.16 NAME 'ipServiceProtocol' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.488 NAME 'fRSStagingPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.25 NAME 'dc' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.502 NAME 'timeVolChange' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.303 NAME 'notificationList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.952 NAME 'mSMQMigrated' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2290 NAME 'msDS-UserAuthNPolicyBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.53 NAME 'lastSetTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.894 NAME 'gPCFileSysPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.22 NAME 'teletexTerminalIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.471 NAME 'schemaVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' )",
"( 1.2.840.113556.1.2.91 NAME 'repsFrom' SYNTAX 'OctetString' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.5 NAME 'msDFSR-StagingPath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.15 NAME 'ipServicePort' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.781 NAME 'lastKnownParent' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 2.5.4.43 NAME 'initials' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.901 NAME 'aCSMaxNoOfAccountFiles' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1928 NAME 'msDS-RevealOnDemandGroup' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1689 NAME 'msDS-Non-Security-Group-Extra-Classes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.344 NAME 'groupsToIgnore' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.896 NAME 'uSNSource' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.964 NAME 'mSMQNt4Flags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2102 NAME 'msDS-ClaimSharesPossibleValuesWithBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 2.5.4.29 NAME 'presentationAddress' SYNTAX '1.3.6.1.4.1.1466.115.121.1.43' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2051 NAME 'msDS-OIDToGroupLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.369 NAME 'fSMORoleOwner' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1954 NAME 'ms-net-ieee-8023-GP-PolicyGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.648 NAME 'primaryTelexNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2283 NAME 'msDS-ServiceAllowedToAuthenticateFrom' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 2.5.4.12 NAME 'title' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.1 NAME 'uid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1247 NAME 'interSiteTopologyRenew' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1697 NAME 'msDS-Settings' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.247 NAME 'printAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2188 NAME 'msDS-ValueTypeReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2133 NAME 'msDNS-MaintainTrustAnchor' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.324 NAME 'msSFU30KeyValues' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.1378 NAME 'mS-SQL-AppleTalk' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1663 NAME 'msDS-Replication-Notify-First-DSA-Delay' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.121 NAME 'securityIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.748 NAME 'attributeDisplayNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.16.840.1.113730.3.1.35 NAME 'thumbnailPhoto' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2082 NAME 'msSPP-KMSIds' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.758 NAME 'aCSMaxTokenRatePerFlow' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.121 NAME 'uSNLastObjRem' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.875 NAME 'fRSMemberReference' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1349 NAME 'gPCUserExtensionNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.138 NAME 'userParameters' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.36 NAME 'userCertificate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.6.13.3.102 NAME 'msDFSR-MemberReferenceBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.131 NAME 'co' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.3 NAME 'cn' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.936 NAME 'mSMQEncryptKey' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.226 NAME 'adminDescription' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.34 NAME 'seeAlso' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.444 NAME 'msExchAssistantName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.667 NAME 'syncWithSID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1998 NAME 'msFVE-VolumeGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2107 NAME 'msTPM-SrkPubThumbprint' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.81 NAME 'info' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1686 NAME 'msWMI-ScopeGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.151 NAME 'oEMInformation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.935 NAME 'mSMQOSType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.374 NAME 'rIDNextRID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2039 NAME 'msDFS-LinkPathv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.141 NAME 'versionNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.505 NAME 'oMTGuid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.88 NAME 'nextRid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2037 NAME 'msDFS-Propertiesv2' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1994 NAME 'msTSLicenseVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.16.840.1.113730.3.140 NAME 'userSMIMECertificate' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1985 NAME 'msTSBrokenConnectionAction' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.281 NAME 'printStaplingSupported' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.17 NAME 'msDFSR-Options' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.485 NAME 'fRSUpdateTimeout' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1967 NAME 'msDS-NC-RO-Replica-Locations' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1819 NAME 'msDS-AzApplicationData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.347 NAME 'msSFU30PosixMemberOf' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1425 NAME 'msCOM-UserLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.6.13.3.24 NAME 'msDFSR-DfsLinkTarget' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.512 NAME 'siteObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.584 NAME 'meetingRating' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1794 NAME 'msDS-NonMembersBL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.776 NAME 'aCSDSBMPriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.520 NAME 'machinePasswordChangeInterval' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.669 NAME 'rIDSetReferences' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.941 NAME 'mSMQLongLived' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1241 NAME 'netbootMirrorDataFile' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.305 NAME 'msSFU30ResultAttributes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2236 NAME 'msds-memberOfTransitive' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1427 NAME 'msCOM-DefaultPartitionLink' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.519 NAME 'lastBackupRestorationTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.337 NAME 'currMachineId' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.32 NAME 'attributeSyntax' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.362 NAME 'siteGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.823 NAME 'certificateTemplates' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.352 NAME 'msSFU30CryptMethod' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1946 NAME 'msDS-PhoneticDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.14 NAME 'searchGuide' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2270 NAME 'msDS-IsManaged' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.581 NAME 'meetingScope' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.673 NAME 'retiredReplDSASignatures' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.855 NAME 'netbootNewMachineNamingPolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1245 NAME 'globalAddressList' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.2.227 NAME 'extensionName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.884 NAME 'msRRASAttribute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.6.18.1.349 NAME 'msSFU30NetgroupUserAtDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.26' )",
"( 1.2.840.113556.1.4.680 NAME 'queryPoint' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.888 NAME 'iPSECNegotiationPolicyAction' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.29 NAME 'msDFSR-CachePolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.299 NAME 'printMediaSupported' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.503 NAME 'timeRefresh' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.11 NAME 'authenticationOptions' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.198 NAME 'systemAuxiliaryClass' SYNTAX '1.3.6.1.4.1.1466.115.121.1.38' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.857 NAME 'netbootIntelliMirrorOSes' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1406 NAME 'mS-SQL-AllowSnapshotFilesFTPDownloading' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1333 NAME 'pKIExtendedKeyUsage' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.2019 NAME 'msDS-LockoutThreshold' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1844 NAME 'msDS-QuotaTrustee' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.18.1.350 NAME 'msSFU30IsValidContainer' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.359 NAME 'netbootGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1719 NAME 'msDS-DnsRootAlias' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.909 NAME 'extendedAttributeInfo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' NO-USER-MODIFICATION )",
"( 1.3.6.1.1.1.1.10 NAME 'shadowExpire' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1248 NAME 'interSiteTopologyFailover' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2275 NAME 'msDS-CloudIsEnabled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.7' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.887 NAME 'iPSECNegotiationPolicyType' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2059 NAME 'msDS-LocalEffectiveDeletionTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.24' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.587 NAME 'meetingStartTime' SYNTAX '1.3.6.1.4.1.1466.115.121.1.53' )",
"( 2.5.4.17 NAME 'postalCode' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.2.445 NAME 'originalDisplayTable' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1990 NAME 'msTSInitialProgram' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.662 NAME 'lockoutTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.21 NAME 'secretary' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.95 NAME 'pwdHistoryLength' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.759 NAME 'aCSMaxPeakBandwidthPerFlow' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.753 NAME 'nameServiceFlags' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.694 NAME 'previousParentCA' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.142 NAME 'winsockAddresses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.2075 NAME 'msTSSecondaryDesktops' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.2105 NAME 'msSPP-CSVLKPid' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.49 NAME 'badPasswordTime' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2184 NAME 'msDS-GeoCoordinatesLatitude' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2079 NAME 'msDS-RequiredForestBehaviorVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1955 NAME 'ms-net-ieee-8023-GP-PolicyData' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.237 NAME 'printBinNames' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1382 NAME 'mS-SQL-InformationURL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.6.13.3.13 NAME 'msDFSR-DirectoryFilter' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.148 NAME 'schemaIDGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.2189 NAME 'msDS-TransformationRules' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 2.5.4.10 NAME 'o' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.504 NAME 'seqNotification' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 2.5.4.7 NAME 'l' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.57 NAME 'defaultLocalPolicyObject' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1801 NAME 'msDS-AzBizRule' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.948 NAME 'mSMQDigests' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.2.327 NAME 'helpFileName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.421 NAME 'domainWidePolicy' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 2.5.4.6 NAME 'c' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2139 NAME 'msDNS-DNSKEYRecordSetTTL' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.11 NAME 'documentIdentifier' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.609 NAME 'sIDHistory' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1711 NAME 'msDS-SDReferenceDomain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1367 NAME 'mS-SQL-Memory' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.715 NAME 'dhcpClasses' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1305 NAME 'moveTreeState' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.757 NAME 'aCSDirection' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.631 NAME 'printPagesPerMinute' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.145 NAME 'revision' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.646 NAME 'otherFacsimileTelephoneNumber' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )",
"( 1.2.840.113556.1.4.1798 NAME 'msDS-AzApplicationName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.366 NAME 'rpcNsAnnotation' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2190 NAME 'msDS-TransformationRulesCompiled' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.4.1636 NAME 'msWMI-StringDefault' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.813 NAME 'upgradeProductCode' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' )",
"( 1.2.840.113556.1.4.1951 NAME 'ms-net-ieee-80211-GP-PolicyGUID' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2098 NAME 'msDS-ClaimValueType' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.194 NAME 'adminDisplayName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.879 NAME 'fRSServiceCommandStatus' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.361 NAME 'netbootMachineFilePath' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.67 NAME 'lSAModifiedCount' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.920 NAME 'mSMQBasePriority' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2277 NAME 'msDS-UserAllowedToAuthenticateTo' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2056 NAME 'msDS-HostServiceAccount' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )",
"( 1.2.840.113556.1.4.1943 NAME 'msDS-PhoneticLastName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.2055 NAME 'msDS-USNLastSyncSuccess' SYNTAX '1.2.840.113556.1.4.906' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.101 NAME 'privateKey' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 2.5.4.42 NAME 'givenName' SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.685 NAME 'parentCACertificateChain' SYNTAX '1.3.6.1.4.1.1466.115.121.1.40' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.1924 NAME 'msDS-RevealedUsers' SYNTAX '1.2.840.113556.1.4.903' NO-USER-MODIFICATION )",
"( 1.2.840.113556.1.2.76 NAME 'objectVersion' SYNTAX '1.3.6.1.4.1.1466.115.121.1.27' SINGLE-VALUE )",
"( 1.2.840.113556.1.4.856 NAME 'netbootNewMachineOU' SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' SINGLE-VALUE )"
],
"cn": [
"Aggregate"
],
"dITContentRules": [
"( 1.2.840.113556.1.6.13.4.6 NAME 'msDFSR-Content' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.14 NAME 'device' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MAY (uid $ manager $ ipHostNumber $ macAddress $ bootParameter $ bootFile ))",
"( 1.2.840.113556.1.5.205 NAME 'msWMI-IntRangeParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.5 NAME 'samServer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.196 NAME 'msPKI-Enterprise-Oid' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.7000.53 NAME 'crossRefContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.7 NAME 'ipNetwork' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.5 NAME 'organizationalUnit' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.152 NAME 'intellimirrorGroup' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.253 NAME 'msFVE-RecoveryInformation' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.262 NAME 'msImaging-PSPs' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.251 NAME 'ms-net-ieee-80211-GroupPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.138 NAME 'aCSSubnet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.43 NAME 'fTDfs' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.27 NAME 'rpcEntry')",
"( 1.2.840.113556.1.5.85 NAME 'dnsZone' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.4.2163 NAME 'msAuthz-CentralAccessRule' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.194 NAME 'msCOM-PartitionSet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.242 NAME 'msDS-QuotaContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.281 NAME 'msDS-ClaimsTransformationPolicies' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.146 NAME 'remoteStorageServicePoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.2 NAME 'samDomainBase')",
"( 1.2.840.113556.1.5.132 NAME 'dHCPClass' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.283 NAME 'msDS-CloudExtensions')",
"( 1.2.840.113556.1.5.89 NAME 'nTFRSSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.24 NAME 'remoteMailRecipient' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (cn ) MAY (telephoneNumber $ userCertificate $ info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ showInAddressBook $ userCert $ legacyExchangeDN $ msDS-PhoneticDisplayName $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ userSMIMECertificate $ textEncodedORAddress $ secretary $ labeledURI ))",
"( 1.2.840.113556.1.5.221 NAME 'msTAPI-RtConference' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.201 NAME 'msWMI-SimplePolicyTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.18.2.212 NAME 'msSFU30NetId' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.49 NAME 'packageRegistration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.139 NAME 'lostAndFound' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.14 NAME 'connectionPoint')",
"( 1.2.840.113556.1.5.6 NAME 'securityPrincipal')",
"( 1.2.840.113556.1.5.147 NAME 'siteLink' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.255 NAME 'msDS-PasswordSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.4.2162 NAME 'msAuthz-CentralAccessRules' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.30 NAME 'serviceInstance' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.156 NAME 'rRASAdministrationDictionary' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.4.2164 NAME 'msAuthz-CentralAccessPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag ))",
"( 1.2.840.113556.1.5.52 NAME 'fileLinkTracking' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.18 NAME 'domainPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.18.2.216 NAME 'msSFU30NetworkUser' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.19 NAME 'simpleSecurityObject')",
"( 1.2.840.113556.1.5.177 NAME 'pKICertificateTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.293 NAME 'msDS-AuthNPolicies' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.2 NAME 'msDFSR-Subscriber' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.31 NAME 'site' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.222 NAME 'msTAPI-RtPerson' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.68 NAME 'applicationSiteSettings')",
"( 1.2.840.113556.1.3.14 NAME 'attributeSchema' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.267 NAME 'msSPP-ActivationObject' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.220 NAME 'msDS-App-Configuration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.23 NAME 'container' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.10 NAME 'msDFSR-Connection' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.207 NAME 'msWMI-UintRangeParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.23 NAME 'printQueue' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.260 NAME 'msDFS-DeletedLinkv2' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.140 NAME 'interSiteTransportContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.130 NAME 'indexServerCatalog' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.98 NAME 'ipsecPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.0 NAME 'top')",
"( 1.2.840.113556.1.5.36 NAME 'volume' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.236 NAME 'msDS-AzOperation' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.9 NAME 'groupOfNames' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.12 NAME 'configuration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.78 NAME 'licensingSiteSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.69 NAME 'nTDSSiteSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.269 NAME 'msDS-ClaimTypePropertyBase')",
"( 1.2.840.113556.1.5.273 NAME 'msDS-ResourceProperty' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.239 NAME 'msDS-AzRole' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.12 NAME 'bootableDevice')",
"( 1.2.840.113556.1.5.294 NAME 'msDS-AuthNPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.86 NAME 'dnsNode' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.210 NAME 'msWMI-StringSetParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.264 NAME 'msDS-ManagedServiceAccount' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipHostNumber ))",
"( 1.2.840.113556.1.5.15 NAME 'contact' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MAY (userCertificate $ info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ showInAddressBook $ userCert $ legacyExchangeDN $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ userSMIMECertificate $ textEncodedORAddress $ secretary $ labeledURI ))",
"( 1.3.6.1.1.1.2.0 NAME 'posixAccount')",
"( 1.2.840.113556.1.5.266 NAME 'msSPP-ActivationObjectsContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.217 NAME 'msWMI-ObjectEncoding' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.33 NAME 'storage' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.67 NAME 'domainDNS' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MAY (cACertificate $ builtinCreationTime $ builtinModifiedCount $ creationTime $ domainPolicyObject $ forceLogoff $ defaultLocalPolicyObject $ lockoutDuration $ lockOutObservationWindow $ lSACreationTime $ lSAModifiedCount $ lockoutThreshold $ maxPwdAge $ minPwdAge $ minPwdLength $ modifiedCountAtLastProm $ nETBIOSName $ nextRid $ pwdProperties $ pwdHistoryLength $ privateKey $ replicaSource $ objectSid $ oEMInformation $ serverState $ uASCompat $ serverRole $ domainReplica $ modifiedCount $ controlAccessRights $ auditingPolicy $ eFSPolicy $ desktopProfile $ nTMixedDomain $ rIDManagerReference $ treeName $ pekList $ pekKeyChangeInterval $ gPLink $ gPOptions $ ms-DS-MachineAccountQuota $ msDS-LogonTimeSyncInterval $ msDS-PerUserTrustQuota $ msDS-AllUsersTrustQuota $ msDS-PerUserTrustTombstonesQuota ))",
"( 1.2.840.113556.1.5.92 NAME 'linkTrackVolEntry' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.11 NAME 'ieee802Device')",
"( 0.9.2342.19200300.100.4.17 NAME 'domainRelatedObject')",
"( 1.2.840.113556.1.5.235 NAME 'msDS-AzApplication' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.107 NAME 'sitesContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.263 NAME 'msImaging-PostScanProcess' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.240 NAME 'msieee80211-Policy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.95 NAME 'subnetContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.6 NAME 'document' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.6 NAME 'person' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.274 NAME 'msDS-ResourcePropertyList' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.270 NAME 'msDS-ClaimTypes' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.1 NAME 'shadowAccount')",
"( 1.2.840.113556.1.5.179 NAME 'mSMQMigratedUser' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.185 NAME 'mS-SQL-OLAPServer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.4.1.1466.101.119.2 NAME 'dynamicObject')",
"( 1.2.840.113556.1.5.155 NAME 'nTFRSSubscriber' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.129 NAME 'rIDSet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.58 NAME 'addressTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.154 NAME 'nTFRSSubscriptions' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.7000.47 NAME 'nTDSDSA' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.175 NAME 'infrastructureUpdate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.18.2.215 NAME 'msSFU30DomainInfo' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.213 NAME 'msWMI-Som' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.82 NAME 'rpcProfile' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.164 NAME 'mSMQSiteLink' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.184 NAME 'mS-SQL-SQLServer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.106 NAME 'queryPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.162 NAME 'mSMQConfiguration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.257 NAME 'msDFS-NamespaceAnchor' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.7 NAME 'msDFSR-ContentSet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.276 NAME 'msTPM-InformationObjectsContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.209 NAME 'msWMI-RealRangeParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.7 NAME 'organizationalPerson' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.176 NAME 'msExchConfigurationContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.278 NAME 'msKds-ProvRootKey' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.238 NAME 'msDS-AzTask' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.282 NAME 'msDS-GroupManagedServiceAccount' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipHostNumber ))",
"( 1.3.6.1.1.1.2.9 NAME 'nisMap' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.10 NAME 'nisObject' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.277 NAME 'msKds-ProvServerConfiguration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.18.2.217 NAME 'msSFU30NISMapConfig' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.7000.48 NAME 'serversContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.90 NAME 'linkTrackVolumeTable' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.188 NAME 'mS-SQL-SQLDatabase' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.211 NAME 'msWMI-PolicyType' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.183 NAME 'dSUISettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.157 NAME 'groupPolicyContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.3 NAME 'samDomain' MAY (forceLogoff $ objectSid $ oEMInformation $ serverState $ uASCompat $ serverRole $ domainReplica $ modifiedCount ))",
"( 1.2.840.113556.1.5.234 NAME 'msDS-AzAdminManager' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.214 NAME 'msWMI-Rule' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.254 NAME 'nTDSDSARO' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.286 NAME 'msDS-Device' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.34 NAME 'trustedDomain' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.7 NAME 'room' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.4 NAME 'organization' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.272 NAME 'msDS-ClaimType' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.3 NAME 'ipService' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.4 NAME 'ipProtocol' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.80 NAME 'rpcGroup' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.17 NAME 'server' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.28 NAME 'secret' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.163 NAME 'mSMQEnterpriseSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.202 NAME 'msWMI-MergeablePolicyTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.195 NAME 'msPKI-Key-Recovery-Agent' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag ))",
"( 0.9.2342.19200300.100.4.18 NAME 'friendlyCountry' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.258 NAME 'msDFS-Namespacev2' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.96 NAME 'subnet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.216 NAME 'applicationVersion' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.10 NAME 'residentialPerson' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.19 NAME 'cRLDistributionPoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.137 NAME 'aCSPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.77 NAME 'controlAccessRight' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.219 NAME 'msMQ-Group' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.8 NAME 'group' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (cn $ objectSid $ sAMAccountName ) MAY (telephoneNumber $ userPassword $ userCertificate $ info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-PhoneticDisplayName $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ userSMIMECertificate $ textEncodedORAddress $ secretary $ labeledURI $ gidNumber $ memberUid ))",
"( 1.2.840.113556.1.6.23.2 NAME 'msPrint-ConnectionPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.11 NAME 'crossRef' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.9 NAME 'msDFSR-Member' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.59 NAME 'displayTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.13 NAME 'classSchema' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.200 NAME 'msWMI-PolicyTemplate' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.165 NAME 'mSMQSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.5 NAME 'oncRpc' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.126 NAME 'serviceConnectionPoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.4 NAME 'builtinDomain' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MAY (creationTime $ forceLogoff $ lockoutDuration $ lockOutObservationWindow $ lockoutThreshold $ maxPwdAge $ minPwdAge $ minPwdLength $ modifiedCountAtLastProm $ nextRid $ pwdProperties $ pwdHistoryLength $ objectSid $ oEMInformation $ serverState $ uASCompat $ serverRole $ domainReplica $ modifiedCount ))",
"( 1.2.840.113556.1.5.241 NAME 'msDS-AppData' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.73 NAME 'rpcServerElement' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.150 NAME 'rRASAdministrationConnectionPoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.191 NAME 'aCSResourceLimits' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.3 NAME 'locality' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.6 NAME 'ipHost')",
"( 1.2.840.113556.1.5.275 NAME 'msTPM-InformationObject' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.289 NAME 'msDS-DeviceContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.4.2129 NAME 'msDNS-ServerSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.76 NAME 'foreignSecurityPrincipal' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.44 NAME 'classStore' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.5 NAME 'account' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.26 NAME 'rpcProfileElement' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.215 NAME 'msWMI-WMIGPO' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.243 NAME 'msDS-QuotaControl' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.256 NAME 'msDS-PasswordSettingsContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.187 NAME 'mS-SQL-SQLPublication' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.9 NAME 'user' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag ))",
"( 1.2.840.113556.1.5.259 NAME 'msDFS-Linkv2' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.141 NAME 'interSiteTransport' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.4 NAME 'msDFSR-GlobalSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.29 NAME 'serviceClass' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.189 NAME 'mS-SQL-OLAPDatabase' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.16 NAME 'certificationAuthority' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.104 NAME 'meeting' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.287 NAME 'msDS-DeviceRegistrationServiceContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.71 NAME 'nTDSConnection' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.291 NAME 'msDS-AuthNPolicySilos' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.218 NAME 'msMQ-Custom-Recipient' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.72 NAME 'nTDSService' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.9 NAME 'dMD' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.280 NAME 'msDS-ClaimsTransformationPolicyType' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.14 NAME 'rFC822LocalPart' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.190 NAME 'mS-SQL-OLAPCube' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.208 NAME 'msWMI-UintSetParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.3.6.1.1.1.2.2 NAME 'posixGroup')",
"( 2.5.6.17 NAME 'groupOfUniqueNames' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.252 NAME 'ms-net-ieee-8023-GroupPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.119 NAME 'ipsecNegotiationPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.292 NAME 'msDS-AuthNPolicySilo' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.121 NAME 'ipsecNFA' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.42 NAME 'dfsConfiguration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 0.9.2342.19200300.100.4.9 NAME 'documentSeries' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.271 NAME 'msDS-ResourceProperties' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.91 NAME 'linkTrackObjectMoveTable' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.136 NAME 'rpcContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.83 NAME 'rIDManager' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.206 NAME 'msWMI-IntSetParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.5 NAME 'msDFSR-ReplicationGroup' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.125 NAME 'addressBookContainer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.7000.49 NAME 'applicationSettings')",
"( 1.2.840.113556.1.5.265 NAME 'msDS-OptionalFeature' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.94 NAME 'serviceAdministrationPoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.102 NAME 'nTFRSReplicaSet' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.203 NAME 'msWMI-RangeParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.7000.56 NAME 'ipsecBase')",
"( 1.2.840.113556.1.6.13.4.3 NAME 'msDFSR-Subscription' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.223 NAME 'msPKI-PrivateKeyRecoveryAgent' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.178 NAME 'pKIEnrollmentService' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.18.2.211 NAME 'msSFU30MailAliases' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.53 NAME 'typeLibrary' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.8 NAME 'msDFSR-Topology' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.237 NAME 'msDS-AzScope' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.74 NAME 'categoryRegistration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.11 NAME 'comConnectionPoint' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.93 NAME 'linkTrackOMTEntry' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.10 NAME 'classRegistration' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.148 NAME 'siteLinkBridge' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.81 NAME 'rpcServer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.46 NAME 'mailRecipient')",
"( 1.2.840.113556.1.5.1 NAME 'securityObject')",
"( 1.2.840.113556.1.5.20 NAME 'leaf')",
"( 1.2.840.113556.1.5.151 NAME 'intellimirrorSCP' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.6.13.4.1 NAME 'msDFSR-LocalSettings' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.186 NAME 'mS-SQL-SQLRepository' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.8 NAME 'organizationalRole' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.20.1 NAME 'subSchema' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.284 NAME 'msDS-DeviceRegistrationService' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.84 NAME 'displaySpecifier' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.212 NAME 'msWMI-ShadowObject' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.59 NAME 'fileLinkTrackingEntry' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.4.2161 NAME 'msAuthz-CentralAccessPolicies' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.161 NAME 'mSMQQueue' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.193 NAME 'msCOM-Partition' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.118 NAME 'ipsecFilter' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.2 NAME 'country' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.97 NAME 'physicalLocation' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.3.30 NAME 'computer' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ) MUST (objectSid $ sAMAccountName ) MAY (info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ showInAddressBook $ userCert $ legacyExchangeDN $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber $ unixUserPassword $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 $ textEncodedORAddress $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipHostNumber ))",
"( 1.3.6.1.1.1.2.8 NAME 'nisNetgroup' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.153 NAME 'nTFRSMember' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.12 NAME 'applicationEntity' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 2.5.6.11 NAME 'applicationProcess' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.279 NAME 'msDS-ValueType' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.204 NAME 'msWMI-UnknownRangeParam' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.66 NAME 'domain')",
"( 2.5.6.13 NAME 'dSA' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))",
"( 1.2.840.113556.1.5.120 NAME 'ipsecISAKMPPolicy' AUX ( mailRecipient $ posixGroup $ ipHost $ samDomain $ dynamicObject $ shadowAccount $ domainRelatedObject $ ieee802Device $ posixAccount $ bootableDevice $ simpleSecurityObject $ securityPrincipal $ msDS-CloudExtensions $ samDomainBase ))"
],
"dSCorePropagationData": [
"16010101000000.0Z"
],
"distinguishedName": [
"CN=Aggregate,CN=Schema,CN=Configuration,DC=FOREST,DC=LAB"
],
"instanceType": [
"4"
],
"modifyTimeStamp": [
"20141006121949.0Z"
],
"name": [
"Aggregate"
],
"objectCategory": [
"CN=SubSchema,CN=Schema,CN=Configuration,DC=FOREST,DC=LAB"
],
"objectClass": [
"top",
"subSchema"
],
"objectClasses": [
"( 1.2.840.113556.1.6.13.4.6 NAME 'msDFSR-Content' SUP top STRUCTURAL MAY (msDFSR-Extension $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 ) )",
"( 2.5.6.14 NAME 'device' SUP top STRUCTURAL MUST (cn ) MAY (serialNumber $ l $ o $ ou $ owner $ seeAlso $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.2.840.113556.1.5.205 NAME 'msWMI-IntRangeParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-IntDefault ) MAY (msWMI-IntMax $ msWMI-IntMin ) )",
"( 1.2.840.113556.1.5.5 NAME 'samServer' SUP securityObject STRUCTURAL MAY (samDomainUpdates ) )",
"( 1.2.840.113556.1.5.196 NAME 'msPKI-Enterprise-Oid' SUP top STRUCTURAL MAY (msPKI-Cert-Template-OID $ msPKI-OID-Attribute $ msPKI-OID-CPS $ msPKI-OID-User-Notice $ msPKI-OIDLocalizedName $ msDS-OIDToGroupLink ) )",
"( 1.2.840.113556.1.5.7000.53 NAME 'crossRefContainer' SUP top STRUCTURAL MAY (uPNSuffixes $ msDS-Behavior-Version $ msDS-SPNSuffixes $ msDS-UpdateScript $ msDS-ExecuteScriptPassword $ msDS-EnabledFeature ) )",
"( 1.3.6.1.1.1.2.7 NAME 'ipNetwork' SUP top STRUCTURAL MUST (cn $ ipNetworkNumber ) MAY (l $ description $ uid $ manager $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ ipNetmaskNumber $ nisMapName ) )",
"( 2.5.6.5 NAME 'organizationalUnit' SUP top STRUCTURAL MUST (ou ) MAY (c $ l $ st $ street $ searchGuide $ businessCategory $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telephoneNumber $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ seeAlso $ userPassword $ co $ countryCode $ desktopProfile $ defaultGroup $ managedBy $ uPNSuffixes $ gPLink $ gPOptions $ msCOM-UserPartitionSetLink $ thumbnailLogo ) )",
"( 1.2.840.113556.1.5.152 NAME 'intellimirrorGroup' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.253 NAME 'msFVE-RecoveryInformation' SUP top STRUCTURAL MUST (msFVE-RecoveryPassword $ msFVE-RecoveryGuid ) MAY (msFVE-VolumeGuid $ msFVE-KeyPackage ) )",
"( 1.2.840.113556.1.5.262 NAME 'msImaging-PSPs' SUP container STRUCTURAL )",
"( 1.2.840.113556.1.5.251 NAME 'ms-net-ieee-80211-GroupPolicy' SUP top STRUCTURAL MAY (ms-net-ieee-80211-GP-PolicyGUID $ ms-net-ieee-80211-GP-PolicyData $ ms-net-ieee-80211-GP-PolicyReserved ) )",
"( 1.2.840.113556.1.5.138 NAME 'aCSSubnet' SUP top STRUCTURAL MAY (aCSMaxTokenRatePerFlow $ aCSMaxPeakBandwidthPerFlow $ aCSMaxDurationPerFlow $ aCSAllocableRSVPBandwidth $ aCSMaxPeakBandwidth $ aCSEnableRSVPMessageLogging $ aCSEventLogLevel $ aCSEnableACSService $ aCSRSVPLogFilesLocation $ aCSMaxNoOfLogFiles $ aCSMaxSizeOfRSVPLogFile $ aCSDSBMPriority $ aCSDSBMRefresh $ aCSDSBMDeadTime $ aCSCacheTimeout $ aCSNonReservedTxLimit $ aCSNonReservedTxSize $ aCSEnableRSVPAccounting $ aCSRSVPAccountFilesLocation $ aCSMaxNoOfAccountFiles $ aCSMaxSizeOfRSVPAccountFile $ aCSServerList $ aCSNonReservedPeakRate $ aCSNonReservedTokenSize $ aCSNonReservedMaxSDUSize $ aCSNonReservedMinPolicedSize ) )",
"( 1.2.840.113556.1.5.43 NAME 'fTDfs' SUP top STRUCTURAL MUST (remoteServerName $ pKTGuid $ pKT ) MAY (keywords $ uNCName $ managedBy ) )",
"( 1.2.840.113556.1.5.27 NAME 'rpcEntry' SUP connectionPoint ABSTRACT )",
"( 1.2.840.113556.1.5.85 NAME 'dnsZone' SUP top STRUCTURAL MUST (dc ) MAY (dnsAllowDynamic $ dnsAllowXFR $ dnsSecureSecondaries $ dnsNotifySecondaries $ managedBy $ dNSProperty $ msDNS-IsSigned $ msDNS-SignWithNSEC3 $ msDNS-NSEC3OptOut $ msDNS-MaintainTrustAnchor $ msDNS-DSRecordAlgorithms $ msDNS-RFC5011KeyRollovers $ msDNS-NSEC3HashAlgorithm $ msDNS-NSEC3RandomSaltLength $ msDNS-NSEC3Iterations $ msDNS-DNSKEYRecordSetTTL $ msDNS-DSRecordSetTTL $ msDNS-SignatureInceptionOffset $ msDNS-SecureDelegationPollingPeriod $ msDNS-SigningKeyDescriptors $ msDNS-SigningKeys $ msDNS-DNSKEYRecords $ msDNS-ParentHasSecureDelegation $ msDNS-PropagationTime $ msDNS-NSEC3UserSalt $ msDNS-NSEC3CurrentSalt ) )",
"( 1.2.840.113556.1.4.2163 NAME 'msAuthz-CentralAccessRule' SUP top STRUCTURAL MAY (Enabled $ msAuthz-EffectiveSecurityPolicy $ msAuthz-ProposedSecurityPolicy $ msAuthz-LastEffectiveSecurityPolicy $ msAuthz-ResourceCondition $ msAuthz-MemberRulesInCentralAccessPolicyBL ) )",
"( 1.2.840.113556.1.5.194 NAME 'msCOM-PartitionSet' SUP top STRUCTURAL MAY (msCOM-PartitionLink $ msCOM-DefaultPartitionLink $ msCOM-ObjectId ) )",
"( 1.2.840.113556.1.5.242 NAME 'msDS-QuotaContainer' SUP top STRUCTURAL MUST (cn ) MAY (msDS-DefaultQuota $ msDS-TombstoneQuotaFactor $ msDS-QuotaEffective $ msDS-QuotaUsed $ msDS-TopQuotaUsage ) )",
"( 1.2.840.113556.1.5.281 NAME 'msDS-ClaimsTransformationPolicies' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.146 NAME 'remoteStorageServicePoint' SUP serviceAdministrationPoint STRUCTURAL MAY (remoteStorageGUID ) )",
"( 1.2.840.113556.1.5.2 NAME 'samDomainBase' SUP top AUXILIARY MAY (nTSecurityDescriptor $ creationTime $ forceLogoff $ lockoutDuration $ lockOutObservationWindow $ lockoutThreshold $ maxPwdAge $ minPwdAge $ minPwdLength $ modifiedCountAtLastProm $ nextRid $ pwdProperties $ pwdHistoryLength $ revision $ objectSid $ oEMInformation $ serverState $ uASCompat $ serverRole $ domainReplica $ modifiedCount ) )",
"( 1.2.840.113556.1.5.132 NAME 'dHCPClass' SUP top STRUCTURAL MUST (dhcpUniqueKey $ dhcpType $ dhcpFlags $ dhcpIdentification ) MAY (networkAddress $ dhcpObjName $ dhcpObjDescription $ dhcpServers $ dhcpSubnets $ dhcpMask $ dhcpRanges $ dhcpSites $ dhcpReservations $ superScopes $ superScopeDescription $ optionDescription $ optionsLocation $ dhcpOptions $ dhcpClasses $ mscopeId $ dhcpState $ dhcpProperties $ dhcpMaxKey $ dhcpUpdateTime ) )",
"( 1.2.840.113556.1.5.283 NAME 'msDS-CloudExtensions' SUP top AUXILIARY MAY (msDS-cloudExtensionAttribute1 $ msDS-cloudExtensionAttribute2 $ msDS-cloudExtensionAttribute3 $ msDS-cloudExtensionAttribute4 $ msDS-cloudExtensionAttribute5 $ msDS-cloudExtensionAttribute6 $ msDS-cloudExtensionAttribute7 $ msDS-cloudExtensionAttribute8 $ msDS-cloudExtensionAttribute9 $ msDS-cloudExtensionAttribute10 $ msDS-cloudExtensionAttribute11 $ msDS-cloudExtensionAttribute12 $ msDS-cloudExtensionAttribute13 $ msDS-cloudExtensionAttribute14 $ msDS-cloudExtensionAttribute15 $ msDS-cloudExtensionAttribute16 $ msDS-cloudExtensionAttribute17 $ msDS-cloudExtensionAttribute18 $ msDS-cloudExtensionAttribute19 $ msDS-cloudExtensionAttribute20 ) )",
"( 1.2.840.113556.1.5.89 NAME 'nTFRSSettings' SUP applicationSettings STRUCTURAL MAY (fRSExtensions $ managedBy ) )",
"( 1.2.840.113556.1.5.24 NAME 'remoteMailRecipient' SUP top STRUCTURAL MAY (remoteSource $ remoteSourceType $ managedBy ) )",
"( 1.2.840.113556.1.5.221 NAME 'msTAPI-RtConference' SUP top STRUCTURAL MUST (msTAPI-uid ) MAY (msTAPI-ProtocolId $ msTAPI-ConferenceBlob ) )",
"( 1.2.840.113556.1.5.201 NAME 'msWMI-SimplePolicyTemplate' SUP msWMI-PolicyTemplate STRUCTURAL MUST (msWMI-TargetObject ) )",
"( 1.2.840.113556.1.6.18.2.212 NAME 'msSFU30NetId' SUP top STRUCTURAL MAY (msSFU30Name $ msSFU30KeyValues $ msSFU30NisDomain $ nisMapName ) )",
"( 1.2.840.113556.1.5.49 NAME 'packageRegistration' SUP top STRUCTURAL MAY (msiScriptPath $ cOMClassID $ cOMInterfaceID $ cOMProgID $ localeID $ machineArchitecture $ iconPath $ cOMTypelibId $ vendor $ packageType $ setupCommand $ packageName $ packageFlags $ versionNumberHi $ versionNumberLo $ lastUpdateSequence $ managedBy $ msiFileList $ categories $ upgradeProductCode $ msiScript $ canUpgradeScript $ fileExtPriority $ productCode $ msiScriptName $ msiScriptSize $ installUiLevel ) )",
"( 1.2.840.113556.1.5.139 NAME 'lostAndFound' SUP top STRUCTURAL MAY (moveTreeState ) )",
"( 1.2.840.113556.1.5.14 NAME 'connectionPoint' SUP leaf ABSTRACT MUST (cn ) MAY (keywords $ managedBy $ msDS-Settings ) )",
"( 1.2.840.113556.1.5.6 NAME 'securityPrincipal' SUP top AUXILIARY MUST (objectSid $ sAMAccountName ) MAY (nTSecurityDescriptor $ securityIdentifier $ supplementalCredentials $ rid $ sAMAccountType $ sIDHistory $ altSecurityIdentities $ tokenGroups $ tokenGroupsNoGCAcceptable $ accountNameHistory $ tokenGroupsGlobalAndUniversal $ msDS-KeyVersionNumber ) )",
"( 1.2.840.113556.1.5.147 NAME 'siteLink' SUP top STRUCTURAL MUST (siteList ) MAY (cost $ schedule $ options $ replInterval ) )",
"( 1.2.840.113556.1.5.255 NAME 'msDS-PasswordSettings' SUP top STRUCTURAL MUST (msDS-MaximumPasswordAge $ msDS-MinimumPasswordAge $ msDS-MinimumPasswordLength $ msDS-PasswordHistoryLength $ msDS-PasswordComplexityEnabled $ msDS-PasswordReversibleEncryptionEnabled $ msDS-LockoutObservationWindow $ msDS-LockoutDuration $ msDS-LockoutThreshold $ msDS-PasswordSettingsPrecedence ) MAY (msDS-PSOAppliesTo ) )",
"( 1.2.840.113556.1.4.2162 NAME 'msAuthz-CentralAccessRules' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.30 NAME 'serviceInstance' SUP connectionPoint STRUCTURAL MUST (displayName $ serviceClassID ) MAY (winsockAddresses $ serviceInstanceVersion ) )",
"( 1.2.840.113556.1.5.156 NAME 'rRASAdministrationDictionary' SUP top STRUCTURAL MAY (msRRASVendorAttributeEntry ) )",
"( 1.2.840.113556.1.4.2164 NAME 'msAuthz-CentralAccessPolicy' SUP top STRUCTURAL MAY (msAuthz-CentralAccessPolicyID $ msAuthz-MemberRulesInCentralAccessPolicy ) )",
"( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' SUP user STRUCTURAL MAY (o $ businessCategory $ userCertificate $ givenName $ initials $ x500uniqueIdentifier $ displayName $ employeeNumber $ employeeType $ homePostalAddress $ userSMIMECertificate $ uid $ mail $ roomNumber $ photo $ manager $ homePhone $ secretary $ mobile $ pager $ audio $ jpegPhoto $ carLicense $ departmentNumber $ preferredLanguage $ userPKCS12 $ labeledURI ) )",
"( 1.2.840.113556.1.5.52 NAME 'fileLinkTracking' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.18 NAME 'domainPolicy' SUP leaf STRUCTURAL MAY (authenticationOptions $ forceLogoff $ defaultLocalPolicyObject $ lockoutDuration $ lockOutObservationWindow $ lockoutThreshold $ maxPwdAge $ maxRenewAge $ maxTicketAge $ minPwdAge $ minPwdLength $ minTicketAge $ pwdProperties $ pwdHistoryLength $ proxyLifetime $ eFSPolicy $ publicKeyPolicy $ domainWidePolicy $ domainPolicyReference $ qualityOfService $ ipsecPolicyReference $ managedBy $ domainCAs ) )",
"( 1.2.840.113556.1.6.18.2.216 NAME 'msSFU30NetworkUser' SUP top STRUCTURAL MAY (msSFU30Name $ msSFU30KeyValues $ msSFU30NisDomain $ nisMapName ) )",
"( 0.9.2342.19200300.100.4.19 NAME 'simpleSecurityObject' SUP top AUXILIARY MAY (userPassword ) )",
"( 1.2.840.113556.1.5.177 NAME 'pKICertificateTemplate' SUP top STRUCTURAL MAY (displayName $ flags $ pKIDefaultKeySpec $ pKIKeyUsage $ pKIMaxIssuingDepth $ pKICriticalExtensions $ pKIExpirationPeriod $ pKIOverlapPeriod $ pKIExtendedKeyUsage $ pKIDefaultCSPs $ pKIEnrollmentAccess $ msPKI-RA-Signature $ msPKI-Enrollment-Flag $ msPKI-Private-Key-Flag $ msPKI-Certificate-Name-Flag $ msPKI-Minimal-Key-Size $ msPKI-Template-Schema-Version $ msPKI-Template-Minor-Revision $ msPKI-Cert-Template-OID $ msPKI-Supersede-Templates $ msPKI-RA-Policies $ msPKI-Certificate-Policy $ msPKI-Certificate-Application-Policy $ msPKI-RA-Application-Policies ) )",
"( 1.2.840.113556.1.5.293 NAME 'msDS-AuthNPolicies' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.6.13.4.2 NAME 'msDFSR-Subscriber' SUP top STRUCTURAL MUST (msDFSR-ReplicationGroupGuid $ msDFSR-MemberReference ) MAY (msDFSR-Extension $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.31 NAME 'site' SUP top STRUCTURAL MAY (location $ notificationList $ managedBy $ gPLink $ gPOptions $ mSMQSiteID $ mSMQNt4Stub $ mSMQSiteForeign $ mSMQInterval1 $ mSMQInterval2 $ msDS-BridgeHeadServersUsed ) )",
"( 1.2.840.113556.1.5.222 NAME 'msTAPI-RtPerson' SUP top STRUCTURAL MAY (msTAPI-uid $ msTAPI-IpAddress ) )",
"( 1.2.840.113556.1.5.68 NAME 'applicationSiteSettings' SUP top ABSTRACT MAY (applicationName $ notificationList ) )",
"( 1.2.840.113556.1.3.14 NAME 'attributeSchema' SUP top STRUCTURAL MUST (cn $ attributeID $ attributeSyntax $ isSingleValued $ oMSyntax $ lDAPDisplayName $ schemaIDGUID ) MAY (rangeLower $ rangeUpper $ mAPIID $ linkID $ oMObjectClass $ searchFlags $ extendedCharsAllowed $ schemaFlagsEx $ attributeSecurityGUID $ systemOnly $ classDisplayName $ isMemberOfPartialAttributeSet $ isDefunct $ isEphemeral $ msDs-Schema-Extensions $ msDS-IntId ) )",
"( 1.2.840.113556.1.5.267 NAME 'msSPP-ActivationObject' SUP top STRUCTURAL MUST (msSPP-CSVLKSkuId $ msSPP-KMSIds $ msSPP-CSVLKPid $ msSPP-CSVLKPartialProductKey ) MAY (msSPP-InstallationId $ msSPP-ConfirmationId $ msSPP-OnlineLicense $ msSPP-PhoneLicense $ msSPP-ConfigLicense $ msSPP-IssuanceLicense ) )",
"( 1.2.840.113556.1.5.220 NAME 'msDS-App-Configuration' SUP applicationSettings STRUCTURAL MAY (owner $ keywords $ managedBy $ msDS-ByteArray $ msDS-DateTime $ msDS-Integer $ msDS-ObjectReference ) )",
"( 1.2.840.113556.1.3.23 NAME 'container' SUP top STRUCTURAL MUST (cn ) MAY (schemaVersion $ defaultClassStore $ msDS-ObjectReference ) )",
"( 1.2.840.113556.1.6.13.4.10 NAME 'msDFSR-Connection' SUP top STRUCTURAL MUST (fromServer ) MAY (msDFSR-Extension $ msDFSR-Enabled $ msDFSR-Schedule $ msDFSR-Keywords $ msDFSR-Flags $ msDFSR-Options $ msDFSR-RdcEnabled $ msDFSR-RdcMinFileSizeInKb $ msDFSR-Priority $ msDFSR-DisablePacketPrivacy $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.207 NAME 'msWMI-UintRangeParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-IntDefault ) MAY (msWMI-IntMax $ msWMI-IntMin ) )",
"( 1.2.840.113556.1.5.23 NAME 'printQueue' SUP connectionPoint STRUCTURAL MUST (uNCName $ versionNumber $ serverName $ printerName $ shortServerName ) MAY (location $ portName $ driverName $ printSeparatorFile $ priority $ defaultPriority $ printStartTime $ printEndTime $ printFormName $ printBinNames $ printMaxResolutionSupported $ printOrientationsSupported $ printMaxCopies $ printCollate $ printColor $ printLanguage $ printAttributes $ printShareName $ printOwner $ printNotify $ printStatus $ printSpooling $ printKeepPrintedJobs $ driverVersion $ printMaxXExtent $ printMaxYExtent $ printMinXExtent $ printMinYExtent $ printStaplingSupported $ printMemory $ assetNumber $ bytesPerMinute $ printRate $ printRateUnit $ printNetworkAddress $ printMACAddress $ printMediaReady $ printNumberUp $ printMediaSupported $ operatingSystem $ operatingSystemVersion $ operatingSystemServicePack $ operatingSystemHotfix $ physicalLocationObject $ printPagesPerMinute $ printDuplexSupported ) )",
"( 1.2.840.113556.1.5.260 NAME 'msDFS-DeletedLinkv2' SUP top STRUCTURAL MUST (msDFS-NamespaceIdentityGUIDv2 $ msDFS-LastModifiedv2 $ msDFS-LinkPathv2 $ msDFS-LinkIdentityGUIDv2 ) MAY (msDFS-Commentv2 $ msDFS-ShortNameLinkPathv2 ) )",
"( 1.2.840.113556.1.5.140 NAME 'interSiteTransportContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.130 NAME 'indexServerCatalog' SUP connectionPoint STRUCTURAL MUST (creator ) MAY (uNCName $ queryPoint $ indexedScopes $ friendlyNames ) )",
"( 1.2.840.113556.1.5.98 NAME 'ipsecPolicy' SUP ipsecBase STRUCTURAL MAY (ipsecISAKMPReference $ ipsecNFAReference ) )",
"( 2.5.6.0 NAME 'top' ABSTRACT MUST (objectClass $ instanceType $ nTSecurityDescriptor $ objectCategory ) MAY (cn $ description $ distinguishedName $ whenCreated $ whenChanged $ subRefs $ displayName $ uSNCreated $ isDeleted $ dSASignature $ objectVersion $ repsTo $ repsFrom $ memberOf $ ownerBL $ uSNChanged $ uSNLastObjRem $ showInAdvancedViewOnly $ adminDisplayName $ proxyAddresses $ adminDescription $ extensionName $ uSNDSALastObjRemoved $ displayNamePrintable $ directReports $ wWWHomePage $ USNIntersite $ name $ objectGUID $ replPropertyMetaData $ replUpToDateVector $ flags $ revision $ wbemPath $ fSMORoleOwner $ systemFlags $ siteObjectBL $ serverReferenceBL $ nonSecurityMemberBL $ queryPolicyBL $ wellKnownObjects $ isPrivilegeHolder $ partialAttributeSet $ managedObjects $ partialAttributeDeletionList $ url $ lastKnownParent $ bridgeheadServerListBL $ netbootSCPBL $ isCriticalSystemObject $ frsComputerReferenceBL $ fRSMemberReferenceBL $ uSNSource $ fromEntry $ allowedChildClasses $ allowedChildClassesEffective $ allowedAttributes $ allowedAttributesEffective $ possibleInferiors $ canonicalName $ proxiedObjectName $ sDRightsEffective $ dSCorePropagationData $ otherWellKnownObjects $ mS-DS-ConsistencyGuid $ mS-DS-ConsistencyChildCount $ masteredBy $ msCOM-PartitionSetLink $ msCOM-UserLink $ msDS-Approx-Immed-Subordinates $ msDS-NCReplCursors $ msDS-NCReplInboundNeighbors $ msDS-NCReplOutboundNeighbors $ msDS-ReplAttributeMetaData $ msDS-ReplValueMetaData $ msDS-NonMembersBL $ msDS-MembersForAzRoleBL $ msDS-OperationsForAzTaskBL $ msDS-TasksForAzTaskBL $ msDS-OperationsForAzRoleBL $ msDS-TasksForAzRoleBL $ msDs-masteredBy $ msDS-ObjectReferenceBL $ msDS-PrincipalName $ msDS-RevealedDSAs $ msDS-KrbTgtLinkBl $ msDS-IsFullReplicaFor $ msDS-IsDomainFor $ msDS-IsPartialReplicaFor $ msDS-AuthenticatedToAccountlist $ msDS-NC-RO-Replica-Locations-BL $ msDS-RevealedListBL $ msDS-PSOApplied $ msDS-NcType $ msDS-OIDToGroupLinkBl $ msDS-HostServiceAccountBL $ isRecycled $ msDS-LocalEffectiveDeletionTime $ msDS-LocalEffectiveRecycleTime $ msDS-LastKnownRDN $ msDS-EnabledFeatureBL $ msDS-ClaimSharesPossibleValuesWithBL $ msDS-MembersOfResourcePropertyListBL $ msDS-IsPrimaryComputerFor $ msDS-ValueTypeReferenceBL $ msDS-TDOIngressBL $ msDS-TDOEgressBL $ msDS-parentdistname $ msDS-ReplValueMetaDataExt $ msds-memberOfTransitive $ msds-memberTransitive $ structuralObjectClass $ createTimeStamp $ modifyTimeStamp $ subSchemaSubEntry $ msSFU30PosixMemberOf $ msDFSR-MemberReferenceBL $ msDFSR-ComputerReferenceBL ) )",
"( 1.2.840.113556.1.5.36 NAME 'volume' SUP connectionPoint STRUCTURAL MUST (uNCName ) MAY (contentIndexingAllowed $ lastContentIndexed ) )",
"( 1.2.840.113556.1.5.236 NAME 'msDS-AzOperation' SUP top STRUCTURAL MUST (msDS-AzOperationID ) MAY (description $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 2.5.6.9 NAME 'groupOfNames' SUP top STRUCTURAL MUST (cn $ member ) MAY (o $ ou $ businessCategory $ owner $ seeAlso ) )",
"( 1.2.840.113556.1.5.12 NAME 'configuration' SUP top STRUCTURAL MUST (cn ) MAY (gPLink $ gPOptions $ msDS-USNLastSyncSuccess ) )",
"( 1.2.840.113556.1.5.78 NAME 'licensingSiteSettings' SUP applicationSiteSettings STRUCTURAL MAY (siteServer ) )",
"( 1.2.840.113556.1.5.69 NAME 'nTDSSiteSettings' SUP applicationSiteSettings STRUCTURAL MAY (schedule $ options $ queryPolicyObject $ managedBy $ interSiteTopologyGenerator $ interSiteTopologyRenew $ interSiteTopologyFailover $ msDS-Preferred-GC-Site ) )",
"( 1.2.840.113556.1.5.269 NAME 'msDS-ClaimTypePropertyBase' SUP top ABSTRACT MAY (Enabled $ msDS-ClaimPossibleValues $ msDS-ClaimSharesPossibleValuesWith ) )",
"( 1.2.840.113556.1.5.273 NAME 'msDS-ResourceProperty' SUP msDS-ClaimTypePropertyBase STRUCTURAL MUST (msDS-ValueTypeReference ) MAY (msDS-IsUsedAsResourceSecurityAttribute $ msDS-AppliesToResourceTypes ) )",
"( 1.2.840.113556.1.5.239 NAME 'msDS-AzRole' SUP top STRUCTURAL MAY (description $ msDS-MembersForAzRole $ msDS-OperationsForAzRole $ msDS-TasksForAzRole $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 1.3.6.1.1.1.2.12 NAME 'bootableDevice' SUP top AUXILIARY MAY (cn $ bootParameter $ bootFile ) )",
"( 1.2.840.113556.1.5.294 NAME 'msDS-AuthNPolicy' SUP top STRUCTURAL MAY (msDS-UserAllowedToAuthenticateTo $ msDS-UserAllowedToAuthenticateFrom $ msDS-UserTGTLifetime $ msDS-ComputerAllowedToAuthenticateTo $ msDS-ComputerTGTLifetime $ msDS-ServiceAllowedToAuthenticateTo $ msDS-ServiceAllowedToAuthenticateFrom $ msDS-ServiceTGTLifetime $ msDS-UserAuthNPolicyBL $ msDS-ComputerAuthNPolicyBL $ msDS-ServiceAuthNPolicyBL $ msDS-AssignedAuthNPolicyBL $ msDS-AuthNPolicyEnforced ) )",
"( 1.2.840.113556.1.5.86 NAME 'dnsNode' SUP top STRUCTURAL MUST (dc ) MAY (dnsRecord $ dNSProperty $ dNSTombstoned ) )",
"( 1.2.840.113556.1.5.210 NAME 'msWMI-StringSetParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-StringDefault ) MAY (msWMI-StringValidValues ) )",
"( 1.2.840.113556.1.5.264 NAME 'msDS-ManagedServiceAccount' SUP computer STRUCTURAL )",
"( 1.2.840.113556.1.5.15 NAME 'contact' SUP organizationalPerson STRUCTURAL MUST (cn ) MAY (notes $ msDS-SourceObjectDN ) )",
"( 1.3.6.1.1.1.2.0 NAME 'posixAccount' SUP top AUXILIARY MAY (cn $ description $ userPassword $ homeDirectory $ unixUserPassword $ uid $ uidNumber $ gidNumber $ gecos $ unixHomeDirectory $ loginShell ) )",
"( 1.2.840.113556.1.5.266 NAME 'msSPP-ActivationObjectsContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.217 NAME 'msWMI-ObjectEncoding' SUP top STRUCTURAL MUST (msWMI-ID $ msWMI-TargetObject $ msWMI-Class $ msWMI-Genus $ msWMI-intFlags1 $ msWMI-intFlags2 $ msWMI-intFlags3 $ msWMI-intFlags4 $ msWMI-Parm1 $ msWMI-Parm2 $ msWMI-Parm3 $ msWMI-Parm4 $ msWMI-ScopeGuid ) )",
"( 1.2.840.113556.1.5.33 NAME 'storage' SUP connectionPoint STRUCTURAL MAY (moniker $ monikerDisplayName $ iconPath ) )",
"( 1.2.840.113556.1.5.67 NAME 'domainDNS' SUP domain STRUCTURAL MAY (managedBy $ msDS-Behavior-Version $ msDS-AllowedDNSSuffixes $ msDS-USNLastSyncSuccess $ msDS-EnabledFeature ) )",
"( 1.2.840.113556.1.5.92 NAME 'linkTrackVolEntry' SUP leaf STRUCTURAL MAY (linkTrackSecret $ volTableIdxGUID $ volTableGUID $ currMachineId $ timeVolChange $ timeRefresh $ seqNotification $ objectCount ) )",
"( 1.3.6.1.1.1.2.11 NAME 'ieee802Device' SUP top AUXILIARY MAY (cn $ macAddress ) )",
"( 0.9.2342.19200300.100.4.17 NAME 'domainRelatedObject' SUP top AUXILIARY MAY (associatedDomain ) )",
"( 1.2.840.113556.1.5.235 NAME 'msDS-AzApplication' SUP top STRUCTURAL MAY (description $ msDS-AzApplicationName $ msDS-AzGenerateAudits $ msDS-AzClassId $ msDS-AzApplicationVersion $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 1.2.840.113556.1.5.107 NAME 'sitesContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.263 NAME 'msImaging-PostScanProcess' SUP top STRUCTURAL MUST (displayName $ msImaging-PSPIdentifier ) MAY (serverName $ msImaging-PSPString ) )",
"( 1.2.840.113556.1.5.240 NAME 'msieee80211-Policy' SUP top STRUCTURAL MAY (msieee80211-Data $ msieee80211-DataType $ msieee80211-ID ) )",
"( 1.2.840.113556.1.5.95 NAME 'subnetContainer' SUP top STRUCTURAL )",
"( 0.9.2342.19200300.100.4.6 NAME 'document' SUP top STRUCTURAL MAY (cn $ l $ o $ ou $ description $ seeAlso $ documentIdentifier $ documentTitle $ documentVersion $ documentAuthor $ documentLocation $ documentPublisher ) )",
"( 2.5.6.6 NAME 'person' SUP top STRUCTURAL MUST (cn ) MAY (sn $ serialNumber $ telephoneNumber $ seeAlso $ userPassword $ attributeCertificateAttribute ) )",
"( 1.2.840.113556.1.5.274 NAME 'msDS-ResourcePropertyList' SUP top STRUCTURAL MAY (msDS-MembersOfResourcePropertyList ) )",
"( 1.2.840.113556.1.5.270 NAME 'msDS-ClaimTypes' SUP top STRUCTURAL )",
"( 1.3.6.1.1.1.2.1 NAME 'shadowAccount' SUP top AUXILIARY MAY (description $ userPassword $ uid $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag ) )",
"( 1.2.840.113556.1.5.179 NAME 'mSMQMigratedUser' SUP top STRUCTURAL MAY (objectSid $ mSMQSignCertificates $ mSMQDigests $ mSMQDigestsMig $ mSMQSignCertificatesMig $ mSMQUserSid ) )",
"( 1.2.840.113556.1.5.185 NAME 'mS-SQL-OLAPServer' SUP serviceConnectionPoint STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-RegisteredOwner $ mS-SQL-Contact $ mS-SQL-Build $ mS-SQL-ServiceAccount $ mS-SQL-Status $ mS-SQL-InformationURL $ mS-SQL-PublicationURL $ mS-SQL-Version $ mS-SQL-Language $ mS-SQL-Keywords ) )",
"( 1.3.6.1.4.1.1466.101.119.2 NAME 'dynamicObject' SUP top AUXILIARY MAY (msDS-Entry-Time-To-Die $ entryTTL ) )",
"( 1.2.840.113556.1.5.155 NAME 'nTFRSSubscriber' SUP top STRUCTURAL MUST (fRSRootPath $ fRSStagingPath ) MAY (schedule $ fRSUpdateTimeout $ fRSFaultCondition $ fRSServiceCommand $ fRSExtensions $ fRSFlags $ fRSMemberReference $ fRSServiceCommandStatus $ fRSTimeLastCommand $ fRSTimeLastConfigChange ) )",
"( 1.2.840.113556.1.5.129 NAME 'rIDSet' SUP top STRUCTURAL MUST (rIDAllocationPool $ rIDPreviousAllocationPool $ rIDUsedPool $ rIDNextRID ) )",
"( 1.2.840.113556.1.3.58 NAME 'addressTemplate' SUP displayTemplate STRUCTURAL MUST (displayName ) MAY (addressSyntax $ perMsgDialogDisplayTable $ perRecipDialogDisplayTable $ addressType $ proxyGenerationEnabled ) )",
"( 1.2.840.113556.1.5.154 NAME 'nTFRSSubscriptions' SUP top STRUCTURAL MAY (fRSWorkingPath $ fRSExtensions $ fRSVersion ) )",
"( 1.2.840.113556.1.5.7000.47 NAME 'nTDSDSA' SUP applicationSettings STRUCTURAL MAY (hasMasterNCs $ hasPartialReplicaNCs $ dMDLocation $ invocationId $ networkAddress $ options $ fRSRootPath $ serverReference $ lastBackupRestorationTime $ queryPolicyObject $ managedBy $ retiredReplDSASignatures $ msDS-Behavior-Version $ msDS-HasInstantiatedNCs $ msDS-ReplicationEpoch $ msDS-HasDomainNCs $ msDS-RetiredReplNCSignatures $ msDS-hasMasterNCs $ msDS-RevealedUsers $ msDS-hasFullReplicaNCs $ msDS-NeverRevealGroup $ msDS-RevealOnDemandGroup $ msDS-isGC $ msDS-isRODC $ msDS-SiteName $ msDS-IsUserCachableAtRodc $ msDS-EnabledFeature ) )",
"( 1.2.840.113556.1.5.175 NAME 'infrastructureUpdate' SUP top STRUCTURAL MAY (dNReferenceUpdate ) )",
"( 1.2.840.113556.1.6.18.2.215 NAME 'msSFU30DomainInfo' SUP top STRUCTURAL MAY (msSFU30SearchContainer $ msSFU30MasterServerName $ msSFU30OrderNumber $ msSFU30Domains $ msSFU30YpServers $ msSFU30MaxGidNumber $ msSFU30MaxUidNumber $ msSFU30IsValidContainer $ msSFU30CryptMethod ) )",
"( 1.2.840.113556.1.5.213 NAME 'msWMI-Som' SUP top STRUCTURAL MUST (msWMI-ID $ msWMI-Name ) MAY (msWMI-Author $ msWMI-ChangeDate $ msWMI-CreationDate $ msWMI-SourceOrganization $ msWMI-intFlags1 $ msWMI-intFlags2 $ msWMI-intFlags3 $ msWMI-intFlags4 $ msWMI-Parm1 $ msWMI-Parm2 $ msWMI-Parm3 $ msWMI-Parm4 ) )",
"( 1.2.840.113556.1.5.82 NAME 'rpcProfile' SUP rpcEntry STRUCTURAL )",
"( 1.2.840.113556.1.5.164 NAME 'mSMQSiteLink' SUP top STRUCTURAL MUST (mSMQSite1 $ mSMQSite2 $ mSMQCost ) MAY (mSMQSiteGates $ mSMQSiteGatesMig ) )",
"( 1.2.840.113556.1.5.184 NAME 'mS-SQL-SQLServer' SUP serviceConnectionPoint STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-RegisteredOwner $ mS-SQL-Contact $ mS-SQL-Location $ mS-SQL-Memory $ mS-SQL-Build $ mS-SQL-ServiceAccount $ mS-SQL-CharacterSet $ mS-SQL-SortOrder $ mS-SQL-UnicodeSortOrder $ mS-SQL-Clustered $ mS-SQL-NamedPipe $ mS-SQL-MultiProtocol $ mS-SQL-SPX $ mS-SQL-TCPIP $ mS-SQL-AppleTalk $ mS-SQL-Vines $ mS-SQL-Status $ mS-SQL-LastUpdatedDate $ mS-SQL-InformationURL $ mS-SQL-GPSLatitude $ mS-SQL-GPSLongitude $ mS-SQL-GPSHeight $ mS-SQL-Keywords ) )",
"( 1.2.840.113556.1.5.106 NAME 'queryPolicy' SUP top STRUCTURAL MAY (lDAPAdminLimits $ lDAPIPDenyList ) )",
"( 1.2.840.113556.1.5.162 NAME 'mSMQConfiguration' SUP top STRUCTURAL MAY (mSMQQuota $ mSMQJournalQuota $ mSMQOwnerID $ mSMQSites $ mSMQOutRoutingServers $ mSMQInRoutingServers $ mSMQServiceType $ mSMQComputerType $ mSMQForeign $ mSMQOSType $ mSMQEncryptKey $ mSMQSignKey $ mSMQDependentClientServices $ mSMQRoutingServices $ mSMQDsServices $ mSMQComputerTypeEx ) )",
"( 1.2.840.113556.1.5.257 NAME 'msDFS-NamespaceAnchor' SUP top STRUCTURAL MUST (msDFS-SchemaMajorVersion ) )",
"( 1.2.840.113556.1.6.13.4.7 NAME 'msDFSR-ContentSet' SUP top STRUCTURAL MAY (description $ msDFSR-Extension $ msDFSR-RootSizeInMb $ msDFSR-StagingSizeInMb $ msDFSR-ConflictSizeInMb $ msDFSR-FileFilter $ msDFSR-DirectoryFilter $ msDFSR-Flags $ msDFSR-Options $ msDFSR-DfsPath $ msDFSR-Priority $ msDFSR-DeletedSizeInMb $ msDFSR-DefaultCompressionExclusionFilter $ msDFSR-OnDemandExclusionFileFilter $ msDFSR-OnDemandExclusionDirectoryFilter $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.276 NAME 'msTPM-InformationObjectsContainer' SUP top STRUCTURAL MUST (cn ) )",
"( 1.2.840.113556.1.5.209 NAME 'msWMI-RealRangeParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-Int8Default ) MAY (msWMI-Int8Max $ msWMI-Int8Min ) )",
"( 2.5.6.7 NAME 'organizationalPerson' SUP person STRUCTURAL MAY (c $ l $ st $ street $ o $ ou $ title $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ givenName $ initials $ generationQualifier $ houseIdentifier $ otherTelephone $ otherPager $ co $ department $ company $ streetAddress $ otherHomePhone $ msExchHouseIdentifier $ personalTitle $ homePostalAddress $ countryCode $ employeeID $ comment $ division $ otherFacsimileTelephoneNumber $ otherMobile $ primaryTelexNumber $ primaryInternationalISDNNumber $ mhsORAddress $ otherMailbox $ assistant $ ipPhone $ otherIpPhone $ msDS-AllowedToDelegateTo $ msDS-PhoneticFirstName $ msDS-PhoneticLastName $ msDS-PhoneticDepartment $ msDS-PhoneticCompanyName $ msDS-PhoneticDisplayName $ msDS-HABSeniorityIndex $ msDS-AllowedToActOnBehalfOfOtherIdentity $ mail $ manager $ homePhone $ mobile $ pager $ middleName $ thumbnailPhoto $ thumbnailLogo ) )",
"( 1.2.840.113556.1.5.176 NAME 'msExchConfigurationContainer' SUP container STRUCTURAL MAY (addressBookRoots $ globalAddressList $ templateRoots $ addressBookRoots2 $ globalAddressList2 $ templateRoots2 ) )",
"( 1.2.840.113556.1.5.278 NAME 'msKds-ProvRootKey' SUP top STRUCTURAL MUST (cn $ msKds-KDFAlgorithmID $ msKds-SecretAgreementAlgorithmID $ msKds-PublicKeyLength $ msKds-PrivateKeyLength $ msKds-RootKeyData $ msKds-Version $ msKds-DomainID $ msKds-UseStartTime $ msKds-CreateTime ) MAY (msKds-KDFParam $ msKds-SecretAgreementParam ) )",
"( 1.2.840.113556.1.5.238 NAME 'msDS-AzTask' SUP top STRUCTURAL MAY (description $ msDS-AzBizRule $ msDS-AzBizRuleLanguage $ msDS-AzLastImportedBizRulePath $ msDS-OperationsForAzTask $ msDS-TasksForAzTask $ msDS-AzTaskIsRoleDefinition $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 1.2.840.113556.1.5.282 NAME 'msDS-GroupManagedServiceAccount' SUP computer STRUCTURAL MUST (msDS-ManagedPasswordInterval ) MAY (msDS-ManagedPassword $ msDS-ManagedPasswordId $ msDS-ManagedPasswordPreviousId $ msDS-GroupMSAMembership ) )",
"( 1.3.6.1.1.1.2.9 NAME 'nisMap' SUP top STRUCTURAL MUST (cn $ nisMapName ) MAY (description ) )",
"( 1.3.6.1.1.1.2.10 NAME 'nisObject' SUP top STRUCTURAL MUST (cn $ nisMapName $ nisMapEntry ) MAY (description $ msSFU30Name $ msSFU30NisDomain ) )",
"( 1.2.840.113556.1.5.277 NAME 'msKds-ProvServerConfiguration' SUP top STRUCTURAL MUST (msKds-Version ) MAY (msKds-KDFAlgorithmID $ msKds-KDFParam $ msKds-SecretAgreementAlgorithmID $ msKds-SecretAgreementParam $ msKds-PublicKeyLength $ msKds-PrivateKeyLength ) )",
"( 1.2.840.113556.1.6.18.2.217 NAME 'msSFU30NISMapConfig' SUP top STRUCTURAL MAY (msSFU30KeyAttributes $ msSFU30FieldSeparator $ msSFU30IntraFieldSeparator $ msSFU30SearchAttributes $ msSFU30ResultAttributes $ msSFU30MapFilter $ msSFU30NSMAPFieldPosition ) )",
"( 1.2.840.113556.1.5.7000.48 NAME 'serversContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.90 NAME 'linkTrackVolumeTable' SUP fileLinkTracking STRUCTURAL )",
"( 1.2.840.113556.1.5.188 NAME 'mS-SQL-SQLDatabase' SUP top STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-Contact $ mS-SQL-Status $ mS-SQL-InformationURL $ mS-SQL-Description $ mS-SQL-Alias $ mS-SQL-Size $ mS-SQL-CreationDate $ mS-SQL-LastBackupDate $ mS-SQL-LastDiagnosticDate $ mS-SQL-Applications $ mS-SQL-Keywords ) )",
"( 1.2.840.113556.1.5.211 NAME 'msWMI-PolicyType' SUP top STRUCTURAL MUST (msWMI-ID $ msWMI-TargetObject ) MAY (msWMI-Author $ msWMI-ChangeDate $ msWMI-CreationDate $ msWMI-SourceOrganization $ msWMI-intFlags1 $ msWMI-intFlags2 $ msWMI-intFlags3 $ msWMI-intFlags4 $ msWMI-Parm1 $ msWMI-Parm2 $ msWMI-Parm3 $ msWMI-Parm4 ) )",
"( 1.2.840.113556.1.5.183 NAME 'dSUISettings' SUP top STRUCTURAL MAY (dSUIAdminNotification $ dSUIAdminMaximum $ dSUIShellMaximum $ msDS-Security-Group-Extra-Classes $ msDS-Non-Security-Group-Extra-Classes $ msDS-FilterContainers ) )",
"( 1.2.840.113556.1.5.157 NAME 'groupPolicyContainer' SUP container STRUCTURAL MAY (flags $ versionNumber $ gPCFunctionalityVersion $ gPCFileSysPath $ gPCMachineExtensionNames $ gPCUserExtensionNames $ gPCWQLFilter ) )",
"( 1.2.840.113556.1.5.3 NAME 'samDomain' SUP top AUXILIARY MAY (description $ cACertificate $ builtinCreationTime $ builtinModifiedCount $ creationTime $ domainPolicyObject $ defaultLocalPolicyObject $ lockoutDuration $ lockOutObservationWindow $ lSACreationTime $ lSAModifiedCount $ lockoutThreshold $ maxPwdAge $ minPwdAge $ minPwdLength $ modifiedCountAtLastProm $ nETBIOSName $ nextRid $ pwdProperties $ pwdHistoryLength $ privateKey $ replicaSource $ controlAccessRights $ auditingPolicy $ eFSPolicy $ desktopProfile $ nTMixedDomain $ rIDManagerReference $ treeName $ pekList $ pekKeyChangeInterval $ gPLink $ gPOptions $ ms-DS-MachineAccountQuota $ msDS-LogonTimeSyncInterval $ msDS-PerUserTrustQuota $ msDS-AllUsersTrustQuota $ msDS-PerUserTrustTombstonesQuota ) )",
"( 1.2.840.113556.1.5.234 NAME 'msDS-AzAdminManager' SUP top STRUCTURAL MAY (description $ msDS-AzDomainTimeout $ msDS-AzScriptEngineCacheMax $ msDS-AzScriptTimeout $ msDS-AzGenerateAudits $ msDS-AzApplicationData $ msDS-AzMajorVersion $ msDS-AzMinorVersion $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 1.2.840.113556.1.5.214 NAME 'msWMI-Rule' SUP top STRUCTURAL MUST (msWMI-Query $ msWMI-QueryLanguage $ msWMI-TargetNameSpace ) )",
"( 1.2.840.113556.1.5.254 NAME 'nTDSDSARO' SUP nTDSDSA STRUCTURAL )",
"( 1.2.840.113556.1.5.286 NAME 'msDS-Device' SUP top STRUCTURAL MUST (displayName $ altSecurityIdentities $ msDS-IsEnabled $ msDS-DeviceID ) MAY (msDS-DeviceOSType $ msDS-DeviceOSVersion $ msDS-DevicePhysicalIDs $ msDS-DeviceObjectVersion $ msDS-RegisteredOwner $ msDS-ApproximateLastLogonTimeStamp $ msDS-RegisteredUsers $ msDS-IsManaged $ msDS-CloudIsManaged $ msDS-CloudAnchor ) )",
"( 1.2.840.113556.1.5.34 NAME 'trustedDomain' SUP leaf STRUCTURAL MAY (securityIdentifier $ trustAuthIncoming $ trustDirection $ trustPartner $ trustPosixOffset $ trustAuthOutgoing $ trustType $ trustAttributes $ domainCrossRef $ flatName $ initialAuthIncoming $ initialAuthOutgoing $ domainIdentifier $ additionalTrustedServiceNames $ mS-DS-CreatorSID $ msDS-TrustForestTrustInfo $ msDS-SupportedEncryptionTypes $ msDS-IngressClaimsTransformationPolicy $ msDS-EgressClaimsTransformationPolicy ) )",
"( 0.9.2342.19200300.100.4.7 NAME 'room' SUP top STRUCTURAL MUST (cn ) MAY (description $ telephoneNumber $ seeAlso $ location $ roomNumber ) )",
"( 2.5.6.4 NAME 'organization' SUP top STRUCTURAL MUST (o ) MAY (l $ st $ street $ searchGuide $ businessCategory $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telephoneNumber $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ seeAlso $ userPassword ) )",
"( 1.2.840.113556.1.5.272 NAME 'msDS-ClaimType' SUP msDS-ClaimTypePropertyBase STRUCTURAL MAY (msDS-ClaimValueType $ msDS-ClaimAttributeSource $ msDS-ClaimTypeAppliesToClass $ msDS-ClaimSource $ msDS-ClaimSourceType $ msDS-ClaimIsValueSpaceRestricted $ msDS-ClaimIsSingleValued ) )",
"( 1.3.6.1.1.1.2.3 NAME 'ipService' SUP top STRUCTURAL MUST (cn $ ipServicePort $ ipServiceProtocol ) MAY (description $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.3.6.1.1.1.2.4 NAME 'ipProtocol' SUP top STRUCTURAL MUST (cn $ ipProtocolNumber ) MAY (description $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.2.840.113556.1.5.80 NAME 'rpcGroup' SUP rpcEntry STRUCTURAL MAY (rpcNsGroup $ rpcNsObjectID ) )",
"( 1.2.840.113556.1.5.17 NAME 'server' SUP top STRUCTURAL MAY (serialNumber $ serverReference $ dNSHostName $ managedBy $ mailAddress $ bridgeheadTransportList $ msDS-isGC $ msDS-isRODC $ msDS-SiteName $ msDS-IsUserCachableAtRodc ) )",
"( 1.2.840.113556.1.5.28 NAME 'secret' SUP leaf STRUCTURAL MAY (currentValue $ lastSetTime $ priorSetTime $ priorValue ) )",
"( 1.2.840.113556.1.5.163 NAME 'mSMQEnterpriseSettings' SUP top STRUCTURAL MAY (mSMQNameStyle $ mSMQCSPName $ mSMQLongLived $ mSMQVersion $ mSMQInterval1 $ mSMQInterval2 ) )",
"( 1.2.840.113556.1.5.202 NAME 'msWMI-MergeablePolicyTemplate' SUP msWMI-PolicyTemplate STRUCTURAL )",
"( 1.2.840.113556.1.5.195 NAME 'msPKI-Key-Recovery-Agent' SUP user STRUCTURAL )",
"( 0.9.2342.19200300.100.4.18 NAME 'friendlyCountry' SUP country STRUCTURAL MUST (co ) )",
"( 1.2.840.113556.1.5.258 NAME 'msDFS-Namespacev2' SUP top STRUCTURAL MUST (msDFS-SchemaMajorVersion $ msDFS-SchemaMinorVersion $ msDFS-GenerationGUIDv2 $ msDFS-NamespaceIdentityGUIDv2 $ msDFS-LastModifiedv2 $ msDFS-Ttlv2 $ msDFS-Propertiesv2 $ msDFS-TargetListv2 ) MAY (msDFS-Commentv2 ) )",
"( 1.2.840.113556.1.5.96 NAME 'subnet' SUP top STRUCTURAL MAY (location $ siteObject $ physicalLocationObject ) )",
"( 1.2.840.113556.1.5.216 NAME 'applicationVersion' SUP applicationSettings STRUCTURAL MAY (owner $ keywords $ versionNumber $ vendor $ versionNumberHi $ versionNumberLo $ managedBy $ appSchemaVersion ) )",
"( 2.5.6.10 NAME 'residentialPerson' SUP person STRUCTURAL MAY (l $ st $ street $ ou $ title $ businessCategory $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod ) )",
"( 2.5.6.19 NAME 'cRLDistributionPoint' SUP top STRUCTURAL MUST (cn ) MAY (authorityRevocationList $ certificateRevocationList $ deltaRevocationList $ cRLPartitionedRevocationList $ certificateAuthorityObject ) )",
"( 1.2.840.113556.1.5.137 NAME 'aCSPolicy' SUP top STRUCTURAL MAY (aCSTimeOfDay $ aCSDirection $ aCSMaxTokenRatePerFlow $ aCSMaxPeakBandwidthPerFlow $ aCSAggregateTokenRatePerUser $ aCSMaxDurationPerFlow $ aCSServiceType $ aCSTotalNoOfFlows $ aCSPriority $ aCSPermissionBits $ aCSIdentityName $ aCSMaxAggregatePeakRatePerUser $ aCSMaxTokenBucketPerFlow $ aCSMaximumSDUSize $ aCSMinimumPolicedSize $ aCSMinimumLatency $ aCSMinimumDelayVariation ) )",
"( 1.2.840.113556.1.5.77 NAME 'controlAccessRight' SUP top STRUCTURAL MAY (rightsGuid $ appliesTo $ localizationDisplayId $ validAccesses ) )",
"( 1.2.840.113556.1.5.219 NAME 'msMQ-Group' SUP top STRUCTURAL MUST (member ) )",
"( 1.2.840.113556.1.5.8 NAME 'group' SUP top STRUCTURAL MUST (groupType ) MAY (member $ nTGroupMembers $ operatorCount $ adminCount $ groupAttributes $ groupMembershipSAM $ controlAccessRights $ desktopProfile $ nonSecurityMember $ managedBy $ primaryGroupToken $ msDS-AzLDAPQuery $ msDS-NonMembers $ msDS-AzBizRule $ msDS-AzBizRuleLanguage $ msDS-AzLastImportedBizRulePath $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData $ msDS-PrimaryComputer $ mail $ msSFU30Name $ msSFU30NisDomain $ msSFU30PosixMember ) )",
"( 1.2.840.113556.1.6.23.2 NAME 'msPrint-ConnectionPolicy' SUP top STRUCTURAL MUST (cn ) MAY (uNCName $ serverName $ printAttributes $ printerName ) )",
"( 1.2.840.113556.1.3.11 NAME 'crossRef' SUP top STRUCTURAL MUST (cn $ nCName $ dnsRoot ) MAY (Enabled $ nETBIOSName $ nTMixedDomain $ trustParent $ superiorDNSRoot $ rootTrust $ msDS-Behavior-Version $ msDS-NC-Replica-Locations $ msDS-Replication-Notify-First-DSA-Delay $ msDS-Replication-Notify-Subsequent-DSA-Delay $ msDS-SDReferenceDomain $ msDS-DnsRootAlias $ msDS-NC-RO-Replica-Locations ) )",
"( 1.2.840.113556.1.6.13.4.9 NAME 'msDFSR-Member' SUP top STRUCTURAL MUST (msDFSR-ComputerReference ) MAY (serverReference $ msDFSR-Extension $ msDFSR-Keywords $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.3.59 NAME 'displayTemplate' SUP top STRUCTURAL MUST (cn ) MAY (helpData32 $ originalDisplayTableMSDOS $ addressEntryDisplayTable $ helpFileName $ addressEntryDisplayTableMSDOS $ helpData16 $ originalDisplayTable ) )",
"( 1.2.840.113556.1.3.13 NAME 'classSchema' SUP top STRUCTURAL MUST (cn $ subClassOf $ governsID $ objectClassCategory $ schemaIDGUID $ defaultObjectCategory ) MAY (possSuperiors $ mustContain $ mayContain $ rDNAttID $ auxiliaryClass $ lDAPDisplayName $ schemaFlagsEx $ systemOnly $ systemPossSuperiors $ systemMayContain $ systemMustContain $ systemAuxiliaryClass $ defaultSecurityDescriptor $ defaultHidingValue $ classDisplayName $ isDefunct $ msDs-Schema-Extensions $ msDS-IntId ) )",
"( 1.2.840.113556.1.5.200 NAME 'msWMI-PolicyTemplate' SUP top STRUCTURAL MUST (msWMI-ID $ msWMI-Name $ msWMI-NormalizedClass $ msWMI-TargetClass $ msWMI-TargetNameSpace $ msWMI-TargetPath ) MAY (msWMI-Author $ msWMI-ChangeDate $ msWMI-CreationDate $ msWMI-SourceOrganization $ msWMI-TargetType $ msWMI-intFlags1 $ msWMI-intFlags2 $ msWMI-intFlags3 $ msWMI-intFlags4 $ msWMI-Parm1 $ msWMI-Parm2 $ msWMI-Parm3 $ msWMI-Parm4 ) )",
"( 1.2.840.113556.1.5.165 NAME 'mSMQSettings' SUP top STRUCTURAL MAY (mSMQOwnerID $ mSMQServices $ mSMQQMID $ mSMQMigrated $ mSMQNt4Flags $ mSMQSiteName $ mSMQRoutingService $ mSMQDsService $ mSMQDependentClientService $ mSMQSiteNameEx ) )",
"( 1.3.6.1.1.1.2.5 NAME 'oncRpc' SUP top STRUCTURAL MUST (cn $ oncRpcNumber ) MAY (description $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.2.840.113556.1.5.126 NAME 'serviceConnectionPoint' SUP connectionPoint STRUCTURAL MAY (versionNumber $ vendor $ versionNumberHi $ versionNumberLo $ serviceClassName $ serviceBindingInformation $ serviceDNSName $ serviceDNSNameType $ appSchemaVersion ) )",
"( 1.2.840.113556.1.5.4 NAME 'builtinDomain' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.241 NAME 'msDS-AppData' SUP applicationSettings STRUCTURAL MAY (owner $ keywords $ managedBy $ msDS-ByteArray $ msDS-DateTime $ msDS-Integer $ msDS-ObjectReference ) )",
"( 1.2.840.113556.1.5.73 NAME 'rpcServerElement' SUP rpcEntry STRUCTURAL MUST (rpcNsBindings $ rpcNsInterfaceID $ rpcNsTransferSyntax ) )",
"( 1.2.840.113556.1.5.150 NAME 'rRASAdministrationConnectionPoint' SUP serviceAdministrationPoint STRUCTURAL MAY (msRRASAttribute ) )",
"( 1.2.840.113556.1.5.191 NAME 'aCSResourceLimits' SUP top STRUCTURAL MAY (aCSMaxTokenRatePerFlow $ aCSMaxPeakBandwidthPerFlow $ aCSServiceType $ aCSAllocableRSVPBandwidth $ aCSMaxPeakBandwidth ) )",
"( 2.5.6.3 NAME 'locality' SUP top STRUCTURAL MUST (l ) MAY (st $ street $ searchGuide $ seeAlso ) )",
"( 1.3.6.1.1.1.2.6 NAME 'ipHost' SUP top AUXILIARY MAY (cn $ l $ description $ uid $ manager $ ipHostNumber ) )",
"( 1.2.840.113556.1.5.275 NAME 'msTPM-InformationObject' SUP top STRUCTURAL MUST (msTPM-OwnerInformation ) MAY (msTPM-SrkPubThumbprint $ msTPM-OwnerInformationTemp ) )",
"( 1.2.840.113556.1.5.289 NAME 'msDS-DeviceContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.4.2129 NAME 'msDNS-ServerSettings' SUP top STRUCTURAL MAY (msDNS-KeymasterZones ) )",
"( 1.2.840.113556.1.5.76 NAME 'foreignSecurityPrincipal' SUP top STRUCTURAL MUST (objectSid ) MAY (foreignIdentifier ) )",
"( 1.2.840.113556.1.5.44 NAME 'classStore' SUP top STRUCTURAL MAY (versionNumber $ nextLevelStore $ lastUpdateSequence $ appSchemaVersion ) )",
"( 0.9.2342.19200300.100.4.5 NAME 'account' SUP top STRUCTURAL MAY (l $ o $ ou $ description $ seeAlso $ uid $ host ) )",
"( 1.2.840.113556.1.5.26 NAME 'rpcProfileElement' SUP rpcEntry STRUCTURAL MUST (rpcNsInterfaceID $ rpcNsPriority ) MAY (rpcNsProfileEntry $ rpcNsAnnotation ) )",
"( 1.2.840.113556.1.5.215 NAME 'msWMI-WMIGPO' SUP top STRUCTURAL MUST (msWMI-TargetClass ) MAY (msWMI-intFlags1 $ msWMI-intFlags2 $ msWMI-intFlags3 $ msWMI-intFlags4 $ msWMI-Parm1 $ msWMI-Parm2 $ msWMI-Parm3 $ msWMI-Parm4 ) )",
"( 1.2.840.113556.1.5.243 NAME 'msDS-QuotaControl' SUP top STRUCTURAL MUST (cn $ msDS-QuotaTrustee $ msDS-QuotaAmount ) )",
"( 1.2.840.113556.1.5.256 NAME 'msDS-PasswordSettingsContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.187 NAME 'mS-SQL-SQLPublication' SUP top STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-Status $ mS-SQL-Description $ mS-SQL-Type $ mS-SQL-Database $ mS-SQL-AllowAnonymousSubscription $ mS-SQL-Publisher $ mS-SQL-AllowKnownPullSubscription $ mS-SQL-AllowImmediateUpdatingSubscription $ mS-SQL-AllowQueuedUpdatingSubscription $ mS-SQL-AllowSnapshotFilesFTPDownloading $ mS-SQL-ThirdParty ) )",
"( 1.2.840.113556.1.5.9 NAME 'user' SUP organizationalPerson STRUCTURAL MAY (o $ businessCategory $ userCertificate $ givenName $ initials $ x500uniqueIdentifier $ displayName $ networkAddress $ employeeNumber $ employeeType $ homePostalAddress $ userAccountControl $ badPwdCount $ codePage $ homeDirectory $ homeDrive $ badPasswordTime $ lastLogoff $ lastLogon $ dBCSPwd $ localeID $ scriptPath $ logonHours $ logonWorkstation $ maxStorage $ userWorkstations $ unicodePwd $ otherLoginWorkstations $ ntPwdHistory $ pwdLastSet $ preferredOU $ primaryGroupID $ userParameters $ profilePath $ operatorCount $ adminCount $ accountExpires $ lmPwdHistory $ groupMembershipSAM $ logonCount $ controlAccessRights $ defaultClassStore $ groupsToIgnore $ groupPriority $ desktopProfile $ dynamicLDAPServer $ userPrincipalName $ lockoutTime $ userSharedFolder $ userSharedFolderOther $ servicePrincipalName $ aCSPolicyName $ terminalServer $ mSMQSignCertificates $ mSMQDigests $ mSMQDigestsMig $ mSMQSignCertificatesMig $ msNPAllowDialin $ msNPCallingStationID $ msNPSavedCallingStationID $ msRADIUSCallbackNumber $ msRADIUSFramedIPAddress $ msRADIUSFramedRoute $ msRADIUSServiceType $ msRASSavedCallbackNumber $ msRASSavedFramedIPAddress $ msRASSavedFramedRoute $ mS-DS-CreatorSID $ msCOM-UserPartitionSetLink $ msDS-Cached-Membership $ msDS-Cached-Membership-Time-Stamp $ msDS-Site-Affinity $ msDS-User-Account-Control-Computed $ lastLogonTimestamp $ msIIS-FTPRoot $ msIIS-FTPDir $ msDRM-IdentityCertificate $ msDS-SourceObjectDN $ msPKIRoamingTimeStamp $ msPKIDPAPIMasterKeys $ msPKIAccountCredentials $ msRADIUS-FramedInterfaceId $ msRADIUS-SavedFramedInterfaceId $ msRADIUS-FramedIpv6Prefix $ msRADIUS-SavedFramedIpv6Prefix $ msRADIUS-FramedIpv6Route $ msRADIUS-SavedFramedIpv6Route $ msDS-SecondaryKrbTgtNumber $ msDS-AuthenticatedAtDC $ msDS-SupportedEncryptionTypes $ msDS-LastSuccessfulInteractiveLogonTime $ msDS-LastFailedInteractiveLogonTime $ msDS-FailedInteractiveLogonCount $ msDS-FailedInteractiveLogonCountAtLastSuccessfulLogon $ msTSProfilePath $ msTSHomeDirectory $ msTSHomeDrive $ msTSAllowLogon $ msTSRemoteControl $ msTSMaxDisconnectionTime $ msTSMaxConnectionTime $ msTSMaxIdleTime $ msTSReconnectionAction $ msTSBrokenConnectionAction $ msTSConnectClientDrives $ msTSConnectPrinterDrives $ msTSDefaultToMainPrinter $ msTSWorkDirectory $ msTSInitialProgram $ msTSProperty01 $ msTSProperty02 $ msTSExpireDate $ msTSLicenseVersion $ msTSManagingLS $ msDS-UserPasswordExpiryTimeComputed $ msTSExpireDate2 $ msTSLicenseVersion2 $ msTSManagingLS2 $ msTSExpireDate3 $ msTSLicenseVersion3 $ msTSManagingLS3 $ msTSExpireDate4 $ msTSLicenseVersion4 $ msTSManagingLS4 $ msTSLSProperty01 $ msTSLSProperty02 $ msDS-ResultantPSO $ msPKI-CredentialRoamingTokens $ msTSPrimaryDesktop $ msTSSecondaryDesktops $ msDS-PrimaryComputer $ msDS-SyncServerUrl $ msDS-AssignedAuthNPolicySilo $ msDS-AuthNPolicySiloMembersBL $ msDS-AssignedAuthNPolicy $ userSMIMECertificate $ uid $ mail $ roomNumber $ photo $ manager $ homePhone $ secretary $ mobile $ pager $ audio $ jpegPhoto $ carLicense $ departmentNumber $ preferredLanguage $ userPKCS12 $ labeledURI $ msSFU30Name $ msSFU30NisDomain ) )",
"( 1.2.840.113556.1.5.259 NAME 'msDFS-Linkv2' SUP top STRUCTURAL MUST (msDFS-GenerationGUIDv2 $ msDFS-NamespaceIdentityGUIDv2 $ msDFS-LastModifiedv2 $ msDFS-Ttlv2 $ msDFS-Propertiesv2 $ msDFS-TargetListv2 $ msDFS-LinkPathv2 $ msDFS-LinkIdentityGUIDv2 ) MAY (msDFS-Commentv2 $ msDFS-LinkSecurityDescriptorv2 $ msDFS-ShortNameLinkPathv2 ) )",
"( 1.2.840.113556.1.5.141 NAME 'interSiteTransport' SUP top STRUCTURAL MUST (transportDLLName $ transportAddressAttribute ) MAY (options $ replInterval ) )",
"( 1.2.840.113556.1.6.13.4.4 NAME 'msDFSR-GlobalSettings' SUP top STRUCTURAL MAY (msDFSR-Extension $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.29 NAME 'serviceClass' SUP leaf STRUCTURAL MUST (displayName $ serviceClassID ) MAY (serviceClassInfo ) )",
"( 1.2.840.113556.1.5.189 NAME 'mS-SQL-OLAPDatabase' SUP top STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-Contact $ mS-SQL-Status $ mS-SQL-LastUpdatedDate $ mS-SQL-InformationURL $ mS-SQL-ConnectionURL $ mS-SQL-PublicationURL $ mS-SQL-Description $ mS-SQL-Type $ mS-SQL-Size $ mS-SQL-LastBackupDate $ mS-SQL-Applications $ mS-SQL-Keywords ) )",
"( 2.5.6.16 NAME 'certificationAuthority' SUP top STRUCTURAL MUST (cn $ cACertificate $ authorityRevocationList $ certificateRevocationList ) MAY (searchGuide $ teletexTerminalIdentifier $ supportedApplicationContext $ crossCertificatePair $ deltaRevocationList $ domainPolicyObject $ parentCA $ dNSHostName $ parentCACertificateChain $ domainID $ cAConnect $ cAWEBURL $ cRLObject $ cAUsages $ previousCACertificates $ pendingCACertificates $ previousParentCA $ pendingParentCA $ currentParentCA $ cACertificateDN $ certificateTemplates $ signatureAlgorithms $ enrollmentProviders ) )",
"( 1.2.840.113556.1.5.104 NAME 'meeting' SUP top STRUCTURAL MUST (meetingName ) MAY (meetingID $ meetingDescription $ meetingKeyword $ meetingLocation $ meetingProtocol $ meetingType $ meetingApplication $ meetingLanguage $ meetingMaxParticipants $ meetingOriginator $ meetingContactInfo $ meetingOwner $ meetingIP $ meetingScope $ meetingAdvertiseScope $ meetingURL $ meetingRating $ meetingIsEncrypted $ meetingRecurrence $ meetingStartTime $ meetingEndTime $ meetingBandwidth $ meetingBlob ) )",
"( 1.2.840.113556.1.5.287 NAME 'msDS-DeviceRegistrationServiceContainer' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.71 NAME 'nTDSConnection' SUP leaf STRUCTURAL MUST (enabledConnection $ fromServer $ options ) MAY (generatedConnection $ schedule $ transportType $ mS-DS-ReplicatesNCReason ) )",
"( 1.2.840.113556.1.5.291 NAME 'msDS-AuthNPolicySilos' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.218 NAME 'msMQ-Custom-Recipient' SUP top STRUCTURAL MAY (msMQ-Recipient-FormatName ) )",
"( 1.2.840.113556.1.5.72 NAME 'nTDSService' SUP top STRUCTURAL MAY (tombstoneLifetime $ dSHeuristics $ garbageCollPeriod $ replTopologyStayOfExecution $ sPNMappings $ msDS-Other-Settings $ msDS-DeletedObjectLifetime ) )",
"( 1.2.840.113556.1.3.9 NAME 'dMD' SUP top STRUCTURAL MUST (cn ) MAY (dmdName $ schemaUpdate $ prefixMap $ schemaInfo $ msDs-Schema-Extensions $ msDS-IntId $ msDS-USNLastSyncSuccess ) )",
"( 1.2.840.113556.1.5.280 NAME 'msDS-ClaimsTransformationPolicyType' SUP top STRUCTURAL MAY (msDS-TransformationRules $ msDS-TransformationRulesCompiled ) )",
"( 0.9.2342.19200300.100.4.14 NAME 'rFC822LocalPart' SUP domain STRUCTURAL MAY (cn $ sn $ street $ description $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telephoneNumber $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ seeAlso ) )",
"( 1.2.840.113556.1.5.190 NAME 'mS-SQL-OLAPCube' SUP top STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-Contact $ mS-SQL-Status $ mS-SQL-LastUpdatedDate $ mS-SQL-InformationURL $ mS-SQL-PublicationURL $ mS-SQL-Description $ mS-SQL-Size $ mS-SQL-Keywords ) )",
"( 1.2.840.113556.1.5.208 NAME 'msWMI-UintSetParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-IntDefault ) MAY (msWMI-IntValidValues ) )",
"( 1.3.6.1.1.1.2.2 NAME 'posixGroup' SUP top AUXILIARY MAY (cn $ description $ userPassword $ unixUserPassword $ gidNumber $ memberUid ) )",
"( 2.5.6.17 NAME 'groupOfUniqueNames' SUP top STRUCTURAL MUST (cn $ uniqueMember ) MAY (o $ ou $ description $ businessCategory $ owner $ seeAlso ) )",
"( 1.2.840.113556.1.5.252 NAME 'ms-net-ieee-8023-GroupPolicy' SUP top STRUCTURAL MAY (ms-net-ieee-8023-GP-PolicyGUID $ ms-net-ieee-8023-GP-PolicyData $ ms-net-ieee-8023-GP-PolicyReserved ) )",
"( 1.2.840.113556.1.5.119 NAME 'ipsecNegotiationPolicy' SUP ipsecBase STRUCTURAL MAY (iPSECNegotiationPolicyType $ iPSECNegotiationPolicyAction ) )",
"( 1.2.840.113556.1.5.292 NAME 'msDS-AuthNPolicySilo' SUP top STRUCTURAL MAY (msDS-AssignedAuthNPolicySiloBL $ msDS-AuthNPolicySiloMembers $ msDS-UserAuthNPolicy $ msDS-ComputerAuthNPolicy $ msDS-ServiceAuthNPolicy $ msDS-AuthNPolicySiloEnforced ) )",
"( 1.2.840.113556.1.5.121 NAME 'ipsecNFA' SUP ipsecBase STRUCTURAL MAY (ipsecNegotiationPolicyReference $ ipsecFilterReference ) )",
"( 1.2.840.113556.1.5.42 NAME 'dfsConfiguration' SUP top STRUCTURAL )",
"( 0.9.2342.19200300.100.4.9 NAME 'documentSeries' SUP top STRUCTURAL MUST (cn ) MAY (l $ o $ ou $ description $ telephoneNumber $ seeAlso ) )",
"( 1.2.840.113556.1.5.271 NAME 'msDS-ResourceProperties' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.91 NAME 'linkTrackObjectMoveTable' SUP fileLinkTracking STRUCTURAL )",
"( 1.2.840.113556.1.5.136 NAME 'rpcContainer' SUP container STRUCTURAL MAY (nameServiceFlags ) )",
"( 1.2.840.113556.1.5.83 NAME 'rIDManager' SUP top STRUCTURAL MUST (rIDAvailablePool ) MAY (msDS-RIDPoolAllocationEnabled ) )",
"( 1.2.840.113556.1.5.206 NAME 'msWMI-IntSetParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-IntDefault ) MAY (msWMI-IntValidValues ) )",
"( 1.2.840.113556.1.6.13.4.5 NAME 'msDFSR-ReplicationGroup' SUP top STRUCTURAL MUST (msDFSR-ReplicationGroupType ) MAY (description $ msDFSR-Version $ msDFSR-Extension $ msDFSR-RootSizeInMb $ msDFSR-StagingSizeInMb $ msDFSR-ConflictSizeInMb $ msDFSR-TombstoneExpiryInMin $ msDFSR-FileFilter $ msDFSR-DirectoryFilter $ msDFSR-Schedule $ msDFSR-Flags $ msDFSR-Options $ msDFSR-DeletedSizeInMb $ msDFSR-DefaultCompressionExclusionFilter $ msDFSR-OnDemandExclusionFileFilter $ msDFSR-OnDemandExclusionDirectoryFilter $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.125 NAME 'addressBookContainer' SUP top STRUCTURAL MUST (displayName ) MAY (purportedSearch ) )",
"( 1.2.840.113556.1.5.7000.49 NAME 'applicationSettings' SUP top ABSTRACT MAY (applicationName $ notificationList $ msDS-Settings ) )",
"( 1.2.840.113556.1.5.265 NAME 'msDS-OptionalFeature' SUP top STRUCTURAL MUST (msDS-OptionalFeatureGUID $ msDS-OptionalFeatureFlags ) MAY (msDS-RequiredDomainBehaviorVersion $ msDS-RequiredForestBehaviorVersion ) )",
"( 1.2.840.113556.1.5.94 NAME 'serviceAdministrationPoint' SUP serviceConnectionPoint STRUCTURAL )",
"( 1.2.840.113556.1.5.102 NAME 'nTFRSReplicaSet' SUP top STRUCTURAL MAY (fRSReplicaSetType $ fRSVersionGUID $ schedule $ fRSFileFilter $ fRSDirectoryFilter $ fRSDSPoll $ fRSServiceCommand $ fRSReplicaSetGUID $ fRSLevelLimit $ fRSRootSecurity $ fRSExtensions $ managedBy $ fRSFlags $ fRSPartnerAuthLevel $ fRSPrimaryMember $ msFRS-Topology-Pref $ msFRS-Hub-Member ) )",
"( 1.2.840.113556.1.5.203 NAME 'msWMI-RangeParam' SUP top STRUCTURAL MUST (msWMI-PropertyName $ msWMI-TargetClass $ msWMI-TargetType ) )",
"( 1.2.840.113556.1.5.7000.56 NAME 'ipsecBase' SUP top ABSTRACT MAY (ipsecName $ ipsecID $ ipsecDataType $ ipsecData $ ipsecOwnersReference ) )",
"( 1.2.840.113556.1.6.13.4.3 NAME 'msDFSR-Subscription' SUP top STRUCTURAL MUST (msDFSR-ContentSetGuid $ msDFSR-ReplicationGroupGuid ) MAY (msDFSR-Extension $ msDFSR-RootPath $ msDFSR-RootSizeInMb $ msDFSR-StagingPath $ msDFSR-StagingSizeInMb $ msDFSR-ConflictPath $ msDFSR-ConflictSizeInMb $ msDFSR-Enabled $ msDFSR-Flags $ msDFSR-Options $ msDFSR-RootFence $ msDFSR-DfsLinkTarget $ msDFSR-DeletedPath $ msDFSR-DeletedSizeInMb $ msDFSR-ReadOnly $ msDFSR-CachePolicy $ msDFSR-MinDurationCacheInMin $ msDFSR-MaxAgeInCacheInMin $ msDFSR-OnDemandExclusionFileFilter $ msDFSR-OnDemandExclusionDirectoryFilter $ msDFSR-Options2 $ msDFSR-StagingCleanupTriggerInPercent ) )",
"( 1.2.840.113556.1.5.223 NAME 'msPKI-PrivateKeyRecoveryAgent' SUP top STRUCTURAL MUST (userCertificate ) )",
"( 1.2.840.113556.1.5.178 NAME 'pKIEnrollmentService' SUP top STRUCTURAL MAY (cACertificate $ dNSHostName $ cACertificateDN $ certificateTemplates $ signatureAlgorithms $ enrollmentProviders $ msPKI-Enrollment-Servers $ msPKI-Site-Name ) )",
"( 1.2.840.113556.1.6.18.2.211 NAME 'msSFU30MailAliases' SUP top STRUCTURAL MAY (msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.2.840.113556.1.5.53 NAME 'typeLibrary' SUP top STRUCTURAL MAY (cOMClassID $ cOMInterfaceID $ cOMUniqueLIBID ) )",
"( 1.2.840.113556.1.6.13.4.8 NAME 'msDFSR-Topology' SUP top STRUCTURAL MAY (msDFSR-Extension $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 ) )",
"( 1.2.840.113556.1.5.237 NAME 'msDS-AzScope' SUP top STRUCTURAL MUST (msDS-AzScopeName ) MAY (description $ msDS-AzApplicationData $ msDS-AzObjectGuid $ msDS-AzGenericData ) )",
"( 1.2.840.113556.1.5.74 NAME 'categoryRegistration' SUP leaf STRUCTURAL MAY (localeID $ categoryId $ managedBy $ localizedDescription ) )",
"( 1.2.840.113556.1.5.11 NAME 'comConnectionPoint' SUP connectionPoint STRUCTURAL MUST (cn ) MAY (marshalledInterface $ moniker $ monikerDisplayName ) )",
"( 1.2.840.113556.1.5.93 NAME 'linkTrackOMTEntry' SUP leaf STRUCTURAL MAY (birthLocation $ oMTIndxGuid $ currentLocation $ timeRefresh $ oMTGuid ) )",
"( 1.2.840.113556.1.5.10 NAME 'classRegistration' SUP leaf STRUCTURAL MAY (cOMInterfaceID $ cOMProgID $ cOMCLSID $ cOMTreatAsClassId $ cOMOtherProgId $ implementedCategories $ requiredCategories $ managedBy ) )",
"( 1.2.840.113556.1.5.148 NAME 'siteLinkBridge' SUP top STRUCTURAL MUST (siteLinkList ) )",
"( 1.2.840.113556.1.5.81 NAME 'rpcServer' SUP rpcEntry STRUCTURAL MAY (rpcNsObjectID $ rpcNsCodeset $ rpcNsEntryFlags ) )",
"( 1.2.840.113556.1.3.46 NAME 'mailRecipient' SUP top AUXILIARY MUST (cn ) MAY (telephoneNumber $ userCertificate $ info $ garbageCollPeriod $ msExchAssistantName $ msExchLabeledURI $ showInAddressBook $ userCert $ legacyExchangeDN $ msDS-PhoneticDisplayName $ msDS-GeoCoordinatesAltitude $ msDS-GeoCoordinatesLatitude $ msDS-GeoCoordinatesLongitude $ userSMIMECertificate $ textEncodedORAddress $ secretary $ labeledURI ) )",
"( 1.2.840.113556.1.5.1 NAME 'securityObject' SUP top ABSTRACT MUST (cn ) )",
"( 1.2.840.113556.1.5.20 NAME 'leaf' SUP top ABSTRACT )",
"( 1.2.840.113556.1.5.151 NAME 'intellimirrorSCP' SUP serviceAdministrationPoint STRUCTURAL MAY (netbootMachineFilePath $ netbootAllowNewClients $ netbootLimitClients $ netbootMaxClients $ netbootCurrentClientCount $ netbootAnswerRequests $ netbootAnswerOnlyValidClients $ netbootNewMachineNamingPolicy $ netbootNewMachineOU $ netbootIntelliMirrorOSes $ netbootTools $ netbootLocallyInstalledOSes $ netbootServer ) )",
"( 1.2.840.113556.1.6.13.4.1 NAME 'msDFSR-LocalSettings' SUP top STRUCTURAL MAY (msDFSR-Version $ msDFSR-Extension $ msDFSR-Flags $ msDFSR-Options $ msDFSR-Options2 $ msDFSR-CommonStagingPath $ msDFSR-CommonStagingSizeInMb $ msDFSR-StagingCleanupTriggerInPercent ) )",
"( 1.2.840.113556.1.5.186 NAME 'mS-SQL-SQLRepository' SUP top STRUCTURAL MAY (mS-SQL-Name $ mS-SQL-Contact $ mS-SQL-Build $ mS-SQL-Status $ mS-SQL-Version $ mS-SQL-Description $ mS-SQL-InformationDirectory ) )",
"( 2.5.6.8 NAME 'organizationalRole' SUP top STRUCTURAL MUST (cn ) MAY (l $ st $ street $ ou $ postalAddress $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ telephoneNumber $ telexNumber $ teletexTerminalIdentifier $ facsimileTelephoneNumber $ x121Address $ internationalISDNNumber $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ roleOccupant $ seeAlso ) )",
"( 2.5.20.1 NAME 'subSchema' SUP top STRUCTURAL MAY (extendedClassInfo $ extendedAttributeInfo $ dITContentRules $ attributeTypes $ objectClasses $ modifyTimeStamp ) )",
"( 1.2.840.113556.1.5.284 NAME 'msDS-DeviceRegistrationService' SUP top STRUCTURAL MUST (msDS-IsEnabled $ msDS-DeviceLocation ) MAY (msDS-IssuerCertificates $ msDS-RegistrationQuota $ msDS-MaximumRegistrationInactivityPeriod $ msDS-IssuerPublicCertificates $ msDS-CloudIssuerPublicCertificates $ msDS-CloudIsEnabled ) )",
"( 1.2.840.113556.1.5.84 NAME 'displaySpecifier' SUP top STRUCTURAL MAY (iconPath $ creationWizard $ contextMenu $ adminPropertyPages $ shellPropertyPages $ classDisplayName $ adminContextMenu $ shellContextMenu $ attributeDisplayNames $ treatAsLeaf $ createDialog $ createWizardExt $ scopeFlags $ queryFilter $ extraColumns $ adminMultiselectPropertyPages ) )",
"( 1.2.840.113556.1.5.212 NAME 'msWMI-ShadowObject' SUP top STRUCTURAL MUST (msWMI-TargetObject ) )",
"( 1.2.840.113556.1.5.59 NAME 'fileLinkTrackingEntry' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.4.2161 NAME 'msAuthz-CentralAccessPolicies' SUP top STRUCTURAL )",
"( 1.2.840.113556.1.5.161 NAME 'mSMQQueue' SUP top STRUCTURAL MAY (mSMQQueueType $ mSMQJournal $ mSMQBasePriority $ mSMQLabel $ mSMQAuthenticate $ mSMQPrivacyLevel $ mSMQOwnerID $ mSMQTransactional $ mSMQQueueQuota $ mSMQQueueJournalQuota $ mSMQQueueNameExt $ mSMQLabelEx $ MSMQ-SecuredSource $ MSMQ-MulticastAddress ) )",
"( 1.2.840.113556.1.5.193 NAME 'msCOM-Partition' SUP top STRUCTURAL MAY (msCOM-ObjectId ) )",
"( 1.2.840.113556.1.5.118 NAME 'ipsecFilter' SUP ipsecBase STRUCTURAL )",
"( 2.5.6.2 NAME 'country' SUP top STRUCTURAL MUST (c ) MAY (searchGuide $ co ) )",
"( 1.2.840.113556.1.5.97 NAME 'physicalLocation' SUP locality STRUCTURAL MAY (managedBy ) )",
"( 1.2.840.113556.1.3.30 NAME 'computer' SUP user STRUCTURAL MAY (cn $ networkAddress $ localPolicyFlags $ defaultLocalPolicyObject $ machineRole $ location $ netbootInitialization $ netbootGUID $ netbootMachineFilePath $ siteGUID $ operatingSystem $ operatingSystemVersion $ operatingSystemServicePack $ operatingSystemHotfix $ volumeCount $ physicalLocationObject $ dNSHostName $ policyReplicationFlags $ managedBy $ rIDSetReferences $ catalogs $ netbootSIFFile $ netbootMirrorDataFile $ msDS-AdditionalDnsHostName $ msDS-AdditionalSamAccountName $ msDS-ExecuteScriptPassword $ msDS-KrbTgtLink $ msDS-RevealedUsers $ msDS-NeverRevealGroup $ msDS-RevealOnDemandGroup $ msDS-RevealedList $ msDS-AuthenticatedAtDC $ msDS-isGC $ msDS-isRODC $ msDS-SiteName $ msDS-PromotionSettings $ msTPM-OwnerInformation $ msTSProperty01 $ msTSProperty02 $ msDS-IsUserCachableAtRodc $ msDS-HostServiceAccount $ msTSEndpointData $ msTSEndpointType $ msTSEndpointPlugin $ msTSPrimaryDesktopBL $ msTSSecondaryDesktopBL $ msTPM-TpmInformationForComputer $ msDS-GenerationId $ msImaging-ThumbprintHash $ msImaging-HashAlgorithm $ netbootDUID $ msSFU30Name $ msSFU30Aliases $ msSFU30NisDomain $ nisMapName ) )",
"( 1.3.6.1.1.1.2.8 NAME 'nisNetgroup' SUP top STRUCTURAL MUST (cn ) MAY (description $ msSFU30Name $ msSFU30NisDomain $ msSFU30NetgroupHostAtDomain $ msSFU30NetgroupUserAtDomain $ memberNisNetgroup $ nisNetgroupTriple $ nisMapName ) )",
"( 1.2.840.113556.1.5.153 NAME 'nTFRSMember' SUP top STRUCTURAL MAY (fRSUpdateTimeout $ fRSServiceCommand $ serverReference $ fRSRootSecurity $ fRSExtensions $ frsComputerReference $ fRSControlDataCreation $ fRSControlInboundBacklog $ fRSControlOutboundBacklog $ fRSFlags $ fRSPartnerAuthLevel ) )",
"( 2.5.6.12 NAME 'applicationEntity' SUP top STRUCTURAL MUST (cn $ presentationAddress ) MAY (l $ o $ ou $ supportedApplicationContext $ seeAlso ) )",
"( 2.5.6.11 NAME 'applicationProcess' SUP top STRUCTURAL MUST (cn ) MAY (l $ ou $ seeAlso ) )",
"( 1.2.840.113556.1.5.279 NAME 'msDS-ValueType' SUP top STRUCTURAL MUST (msDS-ClaimValueType $ msDS-ClaimIsValueSpaceRestricted $ msDS-ClaimIsSingleValued $ msDS-IsPossibleValuesPresent ) )",
"( 1.2.840.113556.1.5.204 NAME 'msWMI-UnknownRangeParam' SUP msWMI-RangeParam STRUCTURAL MUST (msWMI-NormalizedClass $ msWMI-TargetObject ) )",
"( 1.2.840.113556.1.5.66 NAME 'domain' SUP top ABSTRACT MUST (dc ) )",
"( 2.5.6.13 NAME 'dSA' SUP applicationEntity STRUCTURAL MAY (knowledgeInformation ) )",
"( 1.2.840.113556.1.5.120 NAME 'ipsecISAKMPPolicy' SUP ipsecBase STRUCTURAL )"
],
"objectGUID": [
{
"encoded": "sr4GScorekOq9Mmm+aY8Ow==",
"encoding": "base64"
}
],
"systemFlags": [
"134217728"
],
"uSNChanged": [
"5"
],
"uSNCreated": [
"5"
],
"whenChanged": [
"20130521164433.0Z"
],
"whenCreated": [
"20130521164433.0Z"
]
},
"schema_entry": "CN=Aggregate,CN=Schema,CN=Configuration,DC=AD2012,DC=LAB",
"type": "SchemaInfo"
}
"""
ad_2012_r2_dsa_info = """
{
"raw": {
"configurationNamingContext": [
"CN=Configuration,DC=AD2012,DC=LAB"
],
"currentTime": [
"20141111080100.0Z"
],
"defaultNamingContext": [
"DC=AD2012,DC=LAB"
],
"dnsHostName": [
"WIN1.AD2012.LAB"
],
"domainControllerFunctionality": [
"6"
],
"domainFunctionality": [
"6"
],
"dsServiceName": [
"CN=NTDS Settings,CN=WIN1,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=AD2012,DC=LAB"
],
"forestFunctionality": [
"6"
],
"highestCommittedUSN": [
"22591"
],
"isGlobalCatalogReady": [
"TRUE"
],
"isSynchronized": [
"TRUE"
],
"ldapServiceName": [
"AD2012.LAB:win1$@AD2012.LAB"
],
"namingContexts": [
"DC=AD2012,DC=LAB",
"CN=Configuration,DC=AD2012,DC=LAB",
"CN=Schema,CN=Configuration,DC=AD2012,DC=LAB",
"DC=DomainDnsZones,DC=AD2012,DC=LAB",
"DC=ForestDnsZones,DC=AD2012,DC=LAB"
],
"rootDomainNamingContext": [
"DC=AD2012,DC=LAB"
],
"schemaNamingContext": [
"CN=Schema,CN=Configuration,DC=AD2012,DC=LAB"
],
"serverName": [
"CN=WIN1,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=AD2012,DC=LAB"
],
"subschemaSubentry": [
"CN=Aggregate,CN=Schema,CN=Configuration,DC=AD2012,DC=LAB"
],
"supportedCapabilities": [
"1.2.840.113556.1.4.800",
"1.2.840.113556.1.4.1670",
"1.2.840.113556.1.4.1791",
"1.2.840.113556.1.4.1935",
"1.2.840.113556.1.4.2080",
"1.2.840.113556.1.4.2237"
],
"supportedControl": [
"1.2.840.113556.1.4.319",
"1.2.840.113556.1.4.801",
"1.2.840.113556.1.4.473",
"1.2.840.113556.1.4.528",
"1.2.840.113556.1.4.417",
"1.2.840.113556.1.4.619",
"1.2.840.113556.1.4.841",
"1.2.840.113556.1.4.529",
"1.2.840.113556.1.4.805",
"1.2.840.113556.1.4.521",
"1.2.840.113556.1.4.970",
"1.2.840.113556.1.4.1338",
"1.2.840.113556.1.4.474",
"1.2.840.113556.1.4.1339",
"1.2.840.113556.1.4.1340",
"1.2.840.113556.1.4.1413",
"2.16.840.1.113730.3.4.9",
"2.16.840.1.113730.3.4.10",
"1.2.840.113556.1.4.1504",
"1.2.840.113556.1.4.1852",
"1.2.840.113556.1.4.802",
"1.2.840.113556.1.4.1907",
"1.2.840.113556.1.4.1948",
"1.2.840.113556.1.4.1974",
"1.2.840.113556.1.4.1341",
"1.2.840.113556.1.4.2026",
"1.2.840.113556.1.4.2064",
"1.2.840.113556.1.4.2065",
"1.2.840.113556.1.4.2066",
"1.2.840.113556.1.4.2090",
"1.2.840.113556.1.4.2205",
"1.2.840.113556.1.4.2204",
"1.2.840.113556.1.4.2206",
"1.2.840.113556.1.4.2211",
"1.2.840.113556.1.4.2239",
"1.2.840.113556.1.4.2255",
"1.2.840.113556.1.4.2256"
],
"supportedExtension": [
"1.3.6.1.4.1.1466.20037",
"1.3.6.1.4.1.1466.101.119.1",
"1.2.840.113556.1.4.1781",
"1.3.6.1.4.1.4203.1.11.3",
"1.2.840.113556.1.4.2212"
],
"supportedLDAPPolicies": [
"MaxPoolThreads",
"MaxPercentDirSyncRequests",
"MaxDatagramRecv",
"MaxReceiveBuffer",
"InitRecvTimeout",
"MaxConnections",
"MaxConnIdleTime",
"MaxPageSize",
"MaxBatchReturnMessages",
"MaxQueryDuration",
"MaxTempTableSize",
"MaxResultSetSize",
"MinResultSets",
"MaxResultSetsPerConn",
"MaxNotificationPerConn",
"MaxValRange",
"MaxValRangeTransitive",
"ThreadMemoryLimit",
"SystemMemoryLimitPercent"
],
"supportedLDAPVersion": [
"3",
"2"
],
"supportedSASLMechanisms": [
"GSSAPI",
"GSS-SPNEGO",
"EXTERNAL",
"DIGEST-MD5"
]
},
"type": "DsaInfo"
}
"""
| 149.432602 | 3,195 | 0.667855 | 48,192 | 333,683 | 4.624087 | 0.068144 | 0.024484 | 0.044022 | 0.096749 | 0.713508 | 0.701931 | 0.688199 | 0.682823 | 0.674983 | 0.640444 | 0 | 0.220969 | 0.168723 | 333,683 | 2,232 | 3,196 | 149.499552 | 0.582411 | 0.002317 | 0 | 0.024921 | 0 | 0.899411 | 0.999758 | 0.389483 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.018577 | 0.001359 | 0 | 0.001359 | 0.018577 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | e039927879d58fb6b0c764e464548a63990388f6 | 97 | py | Python | helpers.py | ToddKingham/simon | 92bc8a3498fe7aa8acde1aefdf65e5af6773870e | [ "CC0-1.0" ] | null | null | null | helpers.py | ToddKingham/simon | 92bc8a3498fe7aa8acde1aefdf65e5af6773870e | [ "CC0-1.0" ] | null | null | null | helpers.py | ToddKingham/simon | 92bc8a3498fe7aa8acde1aefdf65e5af6773870e | [ "CC0-1.0" ] | null | null | null |
from time import time as now
def has_expired(start, expiry):
    # Both arguments are epoch seconds: a stamped start time and a TTL.
    return now() > start + expiry
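# Minimal usage sketch (ours, not in the original file): stamp a start time
# and treat the session as stale once a hypothetical TTL has elapsed.
SESSION_TTL = 30.0           # seconds; illustrative value
session_started = now()      # epoch seconds at creation time
if has_expired(session_started, SESSION_TTL):
    print('session expired')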
| 16.166667 | 33 | 0.701031 | 15 | 97 | 4.466667 | 0.733333 | 0.328358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.216495 | 97 | 5 | 34 | 19.4 | 0.881579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 | 0edc3baebd21fbd8dfb6879148b7b10683124ea6 | 8,190 | py | Python | data/half_dataset.py | ckxy/Controllable_Multi-Textures_Expansion | adfbbf807fd8812475653e63370976fabfb3c955 | [ "MIT" ] | 1 | 2021-04-19T07:01:49.000Z | 2021-04-19T07:01:49.000Z | data/half_dataset.py | ckxy/Controllable_Multi-Textures_Expansion | adfbbf807fd8812475653e63370976fabfb3c955 | [ "MIT" ] | null | null | null | data/half_dataset.py | ckxy/Controllable_Multi-Textures_Expansion | adfbbf807fd8812475653e63370976fabfb3c955 | [ "MIT" ] | null | null | null |
import os.path
from data.base_dataset import BaseDataset, get_transform
from data.image_folder import make_dataset
from PIL import Image
import random
import torch
import itertools


class N2Dataset(BaseDataset):
    def initialize(self, opt):
        self.opt = opt
        dir = os.path.join(opt.dataroot, 'train' if opt.isTrain else 'test')
        self.paths = make_dataset(dir)
        self.paths = sorted(self.paths)
        self.opt.n_pic = len(self.paths)
        assert self.opt.n_pic > 1
        print('#n_pic = %d' % len(self.paths))

        dir = os.path.join(opt.dataroot, 'trans')
        self.trans_paths = make_dataset(dir)
        self.trans_paths = sorted(self.trans_paths)

        self.fineSize = opt.fineSize
        self.transform = get_transform(opt)

        # All ordered index pairs (i, j), including i == j.
        self.l = []
        for i in itertools.product([i for i in range(self.opt.n_pic)], repeat=2):
            self.l.append(i)
        print(self.l)

        # Only the cross pairs (i, j) with i != j.
        self.t = []
        for i in itertools.permutations([i for i in range(self.opt.n_pic)], 2):
            self.t.append(i)
        print(self.t)

    def __getitem__(self, index):
        if self.l[index][0] == self.l[index][1]:
            # Same-image pair: crop A out of a random patch of B.
            B_path = self.paths[self.l[index][1]]
            B_img = Image.open(B_path).convert('RGB')
            if self.opt.isTrain and not self.opt.no_flip:
                if random.random() > 0.5:
                    B_img = B_img.transpose(Image.FLIP_LEFT_RIGHT)
            w, h = B_img.size
            rw = random.randint(0, w - self.fineSize)
            rh = random.randint(0, h - self.fineSize)
            B_img = B_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            w, h = B_img.size
            rw = random.randint(0, int(w / 2))
            rh = random.randint(0, int(h / 2))
            A_img = B_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))
            A_star = A_img
        else:
            # Cross pair: B is the pre-rendered translation, A the source image.
            B_path = self.trans_paths[self.t.index(self.l[index])]
            A_path = self.paths[self.l[index][0]]
            B_img = Image.open(B_path).convert('RGB')
            A_img = Image.open(A_path).convert('RGB')
            if self.opt.isTrain and not self.opt.no_flip:
                if random.random() > 0.5:
                    B_img = B_img.transpose(Image.FLIP_LEFT_RIGHT)
                    A_img = A_img.transpose(Image.FLIP_LEFT_RIGHT)
            w, h = B_img.size
            rw = random.randint(0, w - self.fineSize)
            rh = random.randint(0, h - self.fineSize)
            B_img = B_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            A_img = A_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            w, h = A_img.size
            rw = random.randint(0, int(w / 2))
            rh = random.randint(0, int(h / 2))
            A_img = A_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))
            A_star = B_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))

        B_label_short = torch.LongTensor(1).zero_()
        B_label_short[0] = self.l[index][1]
        A_label_short = torch.LongTensor(1).zero_()
        A_label_short[0] = self.l[index][0]

        B_img = self.transform(B_img)
        A_img = self.transform(A_img)
        A_star = self.transform(A_star)
        return {'A': A_img, 'B': B_img, 'A_star': A_star,
                'A_label': A_label_short, 'B_label': B_label_short}

    def __len__(self):
        return len(self.l)

    def name(self):
        return 'N2Dataset'


class N2TestDataset(BaseDataset):
    def initialize(self, opt):
        self.opt = opt
        self.dir = os.path.join(opt.dataroot, 'train')
        self.paths = make_dataset(self.dir)
        self.paths = sorted(self.paths)
        self.fineSize = opt.fineSize
        self.transform = get_transform(opt)
        # Test-time pairs are always (i, i).
        self.l = []
        for i in range(len(self.paths)):
            self.l.append((i, i))
        print(self.l)

    def __getitem__(self, index):
        B_path = self.paths[self.l[index][1]]
        A_path = B_path
        B_img = Image.open(B_path).convert('RGB')
        w, h = B_img.size
        rw = random.randint(0, w - self.fineSize)
        rh = random.randint(0, h - self.fineSize)
        B_img = B_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
        A_img = B_img
        B_img = self.transform(B_img)
        A_img = self.transform(A_img)
        A_path = os.path.splitext(A_path)[0] + '->' + os.path.splitext(B_path[len(self.dir) + 1:])[0]
        return {'A': A_img, 'B': B_img, 'A_path': A_path}

    def __len__(self):
        return len(self.l)

    def name(self):
        return 'N2TestDataset'


class _2NDataset(BaseDataset):
    def initialize(self, opt):
        self.opt = opt
        dir = os.path.join(opt.dataroot, 'train' if opt.isTrain else 'test')
        self.paths = make_dataset(dir)
        self.paths = sorted(self.paths)
        self.opt.n_pic = len(self.paths)
        assert self.opt.n_pic > 1
        print('#n_pic = %d' % len(self.paths))
        self.fineSize = opt.fineSize
        self.transform = get_transform(opt)
        # (i, i) marks a same-image pair; (-1, i) marks a translated pair.
        self.l = []
        for i in range(self.opt.n_pic):
            self.l.append((i, i))
        for i in range(self.opt.n_pic):
            self.l.append((-1, i))
        print(self.l)

    def __getitem__(self, index):
        if self.l[index][0] == self.l[index][1]:
            B_path = self.paths[self.l[index][1]]
            B_img = Image.open(B_path).convert('RGB')
            if self.opt.isTrain and not self.opt.no_flip:
                if random.random() > 0.5:
                    B_img = B_img.transpose(Image.FLIP_LEFT_RIGHT)
            w, h = B_img.size
            rw = random.randint(0, w - self.fineSize)
            rh = random.randint(0, h - self.fineSize)
            B_img = B_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            w, h = B_img.size
            rw = random.randint(0, int(w / 2))
            rh = random.randint(0, int(h / 2))
            A_img = B_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))
            B_label_short = torch.FloatTensor(self.opt.n_pic).zero_()
            B_label_short[self.l[index][1]] = 1
            A_label_short = B_label_short
            A_star = A_img
        else:
            dir = os.path.join(self.opt.dataroot, '2n_trans')
            dir = os.path.join(dir, str(self.l[index][1]))
            trans_paths = make_dataset(dir)
            trans_paths = sorted(trans_paths)
            # bi = random.randint(0, len(trans_paths) - 1)
            # B_path = trans_paths[bi]
            B_path = trans_paths[0]
            name = os.path.splitext(os.path.basename(B_path))[0]
            A_path = self.paths[int(name)]
            B_img = Image.open(B_path).convert('RGB')
            A_img = Image.open(A_path).convert('RGB')
            if self.opt.isTrain and not self.opt.no_flip:
                if random.random() > 0.5:
                    B_img = B_img.transpose(Image.FLIP_LEFT_RIGHT)
                    A_img = A_img.transpose(Image.FLIP_LEFT_RIGHT)
            w, h = B_img.size
            rw = random.randint(0, w - self.fineSize)
            rh = random.randint(0, h - self.fineSize)
            B_img = B_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            A_img = A_img.crop((rw, rh, rw + self.fineSize, rh + self.fineSize))
            w, h = A_img.size
            rw = random.randint(0, int(w / 2))
            rh = random.randint(0, int(h / 2))
            A_img = A_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))
            A_star = B_img.crop((rw, rh, int(rw + w / 2), int(rh + h / 2)))
            B_label_short = torch.FloatTensor(self.opt.n_pic).zero_()
            B_label_short[self.l[index][1]] = 1
            A_label_short = torch.FloatTensor(self.opt.n_pic).zero_()
            A_label_short[int(name)] = 1

        B_img = self.transform(B_img)
        A_img = self.transform(A_img)
        A_star = self.transform(A_star)
        return {'A': A_img, 'B': B_img, 'A_star': A_star,
                'A_label': A_label_short, 'B_label': B_label_short}

    def __len__(self):
        return len(self.l)

    def name(self):
        return '2NDataset'
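# Sketch of the index bookkeeping used above, assuming n_pic = 3 (ours, for
# illustration): self.l enumerates every ordered pair including (i, i), self.t
# only the cross pairs, and self.t.index(self.l[index]) locates the matching
# pre-rendered image under 'trans'.
if __name__ == '__main__':
    n_pic = 3
    l = list(itertools.product(range(n_pic), repeat=2))    # 9 pairs, incl. (i, i)
    t = list(itertools.permutations(range(n_pic), 2))      # 6 pairs, i != j
    assert t.index((0, 1)) == 0 and t.index((2, 1)) == 5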
| 35.301724 | 101 | 0.550549 | 1,224 | 8,190 | 3.498366 | 0.07598 | 0.041102 | 0.062121 | 0.033396 | 0.822513 | 0.800327 | 0.768566 | 0.75759 | 0.722092 | 0.703877 | 0 | 0.015052 | 0.310501 | 8,190 | 231 | 102 | 35.454545 | 0.743226 | 0.008425 | 0 | 0.743017 | 0 | 0 | 0.020202 | 0 | 0 | 0 | 0 | 0 | 0.011173 | 1 | 0.067039 | false | 0 | 0.039106 | 0.03352 | 0.173184 | 0.03352 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 16317e2c1b823d66bdc2795fa3a70cee09fb46b8 | 202 | py | Python | oscar/apps/shipping/models.py | owad/django-oscar | cfa69e37dc9abc97a7aff5c8616da319e1771008 | [ "BSD-3-Clause" ] | 1 | 2022-03-17T19:26:13.000Z | 2022-03-17T19:26:13.000Z | oscar/apps/shipping/models.py | aykut/django-oscar | ca3629e74ea1e0affc55d3de4e97f523e352d267 | [ "BSD-3-Clause" ] | null | null | null | oscar/apps/shipping/models.py | aykut/django-oscar | ca3629e74ea1e0affc55d3de4e97f523e352d267 | [ "BSD-3-Clause" ] | 1 | 2019-03-23T10:26:02.000Z | 2019-03-23T10:26:02.000Z |
from django.db import models
from oscar.apps.shipping.abstract_models import AbstractOrderAndItemLevelChargeMethod
class OrderAndItemLevelChargeMethod(AbstractOrderAndItemLevelChargeMethod):
    pass
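# Usage note (ours): this one-liner is django-oscar's app-forking convention;
# a project replaces the module to customise the concrete model while keeping
# the abstract base, e.g. with a hypothetical extra field:
#
#   class OrderAndItemLevelChargeMethod(AbstractOrderAndItemLevelChargeMethod):
#       notes = models.TextField(blank=True)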
| 28.857143 | 85 | 0.881188 | 17 | 202 | 10.411765 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084158 | 202 | 7 | 86 | 28.857143 | 0.956757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.25 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 | 163a021e6a1d77aba54f47c136ef8b6b9dcc7fdc | 54,255 | py | Python | fxpt/fx_texture_manager/resources_rc.py | theetcher/fxpt | d40c571885c7c4056f548a60140740c7beb8703d | [ "MIT" ] | 18 | 2015-11-02T08:02:16.000Z | 2021-04-08T15:45:29.000Z | fxpt/fx_texture_manager/resources_rc.py | dseeni/fxpt | d40c571885c7c4056f548a60140740c7beb8703d | [ "MIT" ] | 4 | 2017-01-09T08:29:37.000Z | 2020-04-15T06:04:33.000Z | fxpt/fx_texture_manager/resources_rc.py | dseeni/fxpt | d40c571885c7c4056f548a60140740c7beb8703d | [ "MIT" ] | 7 | 2016-09-01T08:38:53.000Z | 2020-09-04T00:16:52.000Z |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Sat 19. Nov 23:58:07 2016
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
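# Usage note (ours): importing a pyside-rcc module such as this registers the
# embedded payload with Qt, after which the files resolve via ":/..." resource
# paths, e.g. QtGui.QIcon(':/icons/example.png') for a hypothetical icon path.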
qt_resource_data = "\x00\x00\x03\xc3\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x03\x8aIDATx\xda\xa5\x97\x7fH\x13a\x18\xc7\xbf\x9b\xa9c\xb6\xa5\xb323\x0a\xdbm\xa9-\x15\x0d\xc9P+J\xa2\xa4\xa8H\xb2?\xa4_\x14\x14\x14\x91\x15\x16IDaR)\x91P\xa0D\x85\x7fd\x14%\x85ET\xf4\x93\x14QQS\xd3\xed,\x8a~\x97f\x9b\x9a\xd3\x9a\xbdw\xedds\xbb\xbdw\xf3\x81\x97{\xdf\xf7\xee\xf9~?\xb7{\xef\xbdg\x0ax\x8f\xa0\x17\x8b\x13\xed\x5c'\xedIS09\x0c\xc1\xbf\xa0\xea(\xc4\x92\xe2O\x14\xf3\x83\x96#y\xfeBH\xd2QxM:z\x12\xbf\x0a\xf7\xf1\x13\x93\x0e\x97\xa0\xe5\xd8!\xb9\x10\x92uF\x01J\xe2\xf5SSt\x9a\xaf\xa6\xc3\xc7`;\x9d\xef\xa6\xa69P\x84\xd6\xc2\xa3R!xs\xa9:<@\x91):\x22m\xf2\xa4/\xa6\x83\x05\xe8;[\xe0Uu\xe2\xde\xe3h=u\x9c\x06\xf1\xdf\x5c\x86\x8eBH\x9a\xbb/\x1f\x03\xe7O\xf8\xbc5\xf5\xae#h+)\x12\x83\xf0K\x87\x03\x08#\x89=q\xbb\xf30x\xf1\x0c\xf5\xe1\xaa\xb6\xedG{i\xf1X\x08\xde\xdc\x0f\x0d\x1d\x0f@\x9a\x91\x08\xd4\xc6\xed\xdc\x03{E)U 8w7\xda/\x9c\x13 \xc0\x9b\xcb\xcf]@\x86f\xfe\x11\x906\x95\xb4h\x22\xf4,v\xfb.\x0c_+\xa3\x0a\x05n\xd8\x81\xd7\xe5\xe7\xf9\xbe\xdc\x1cb\x9eA\x86oI\xfb&\xbc\x05n\x101\x9b\xb7\xe3o\xd5\x15\xaa\xa0u\xc8\xc1\x1f\xb5AJ\xea\xb5\x01k6\xa1\xe3r\xb9\x9b9\x9c\x8bP\x08w\x88\xdc-pTWR\x85\xa5\x842+\x07\x1d\x15\x97<\xcc\xb9s\x1e\x1b\x91+\xc4\x9c\x8d\xb9\xc0\x83\x9b\xe3s\xcf\x5c\x87\xce\xab\x15^\xcd\xbd\x01xBd\xe7@\xf9\xf4\xae_\xde\x8eE+\xd1y\xbdR\xd4\x5c\x0c\xc0\x03\xc2\xb8v=&\xd4<\x94e\xfe'u\x19\xcc\xb7n\xf84\xf7\x05\xe0\x09\xb1z\x0d\x02\xeb\x9fI2\x1f\x9e\x9f\x01\xf3\xed*\xaa9\x0d@\x80\x98A\x00\xba\x8cY\xab\x10\xd4\x5c+\x09`(a\x01\xcc\xd5w8\x00=\x19~\x80\x8f\xef\x07\x15\x80\xdbd\x0c\xcbW@\xf5\xbaQ\xea\xaf\xcf\xc7`l\x12,\xf7\xefQ?`>\x1f\x01gn\x5c\x9a\x09UW\x9b,\xf3Q\x08\xfd\x5c\x98\x1f=\xf0\x09!\xba\x08\xf9;_\xbc\x04\xea\xf7\x16\xbf\xcc\x85\x18\x98i\x80\xe5\xc9c\xe9\x15\x91P\x17\x18\xd2\xd3\x11\xf2\xf9\xdd\xb8\xcc\x85\xe8\x8f\x9c\x05\xcb\xf3\xe7\xf4\x8aH\xa8\x0b\x98\xd4\x85\xd0t\x7f\xa6\x0a\xf7Xm\xfcQ\xa7\xd5P\xaf\xb5\x85G\x82\xady)^\x11\x8d\x9a\xa7\xa4@k\xed\xa6\x0aZ\xb5\xe1`\xeb\xea\xf8\xbe\xdc\x1c\xd1\x8a\x88INF\xe8\xa0\x8d*\xd4\xab\xd2\x80mh\xc0\xd6\xfa\xcels\xdf\xef~\xb2^\xee\xca\xcd\xf5\xa8\x88\x98\xc4D\x84\xfd\x1d\xa4\x0a\xfcT\x06\x83mn\xc6\x8eFsV\xbbu\xa0\x93L\x8d\x90\x16\xc5mVLB\x02\xc2\x1cv\xbaF\x80\x0alS\x93{E\xc4\xcc3A\xa7\x1c\xf1\x99\xd8\xe3P\x80}\xd5*\x98\xb7\x92\xa9\xafN\x80\xd1\x1dS\x8e\x8eGE\xc4\xc4\xc5 <8\xd0kR\xb7}\x18l{\xc7Xs\xe1v\xdd\xb6m):\xa2\x15\x11c4`\xb2F\xed\x96\xf4\xc36\x00\xd6l\x113\x87W\x08\x1f:\xd4\x8a\x88\xd1\xcf\xc6\x14](\x7f\xe2{O/\xd8\xae74sH\xd5\x91\x5c\x111\xfah~\x92\xedz+\xd5\xdcM'N\xab\x8e/K2V\xbb\xea\xc8\xae\x88\xb8\x09\x99\xe6\xae:\xd3\x08\x84\x89\x83p\xd1i\x81\x8c\x8a(\xcay\xfe\x93Ls!\xb8\xf7<\x82\xb4\xe9\xce\xf1G\xa7\x8e\xe4\x8a(\xc4\xd9\xef\xc78\xfe\x9e\xd3t\xfe\x01\x94\xf8)\x9b\xb4\x84\xcc\x9e\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x04\x1b\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 
\x08\x06\x00\x00\x00szz\xf4\x00\x00\x03\xe2IDATx\xda\xad\x97IL\x13Q\x18\xc7\xbf\xd7\x0aR\xa4,Av\x12\xb7\x10\x15\x0f^\xbc\x19\xdc\xa3B%\x88\x88\x07o\xde\x0dqK\x5c@\x08\xb2$\x1cL\x8cg\x13\x89z3\x0a.\x05/&\x12\x83\xbb\x89\x9a\xa8\x18@\x84\x98j\xcb\x22[[h;3~\xdfk\xa7\x99)C;\x9d\xf2o\xdeLg:\xf3\xdeo\xbe\xf9\xbe\xff{e\x00\xb0*\xbf\xd9\xde\xed\x11\x98\xcd/HxHmy\x99M\x0c\x92A|4\xd5Ry\x1c\x0f\x03\x90\xa0\x186kzc\xcf\xecLs\xb9\xee\x9b\x9e\x0fM@\xd5\xad\x97]\xee\x8ec\xb5x($\x0a\x90c\xa9\xb7\xbb<\xad\x15\xf0x\xe0/?\x11\xdc(\x84A\x11qW\xb5%\x1fX\xdd\x03\x90n\xd6@\xef\x8fq\xa8\xb9\xdd\xdf\xe7\xed\xa8>\x90H$h\xa8\x5cK\xfdS\xa7\xa7\xd5\x06\xcf\x06\x9d`b\x0cX\x04\x81\x84\x1fA\x92\xa0\xbc$\x0f\xd8\x99\x87\xf0\xb3\xe90l\xc8J\x05\xfb\x80\x0bj\xef\xbe\xfb\xe3m\xaf\x5c\x87\x97\xf9\x13\x06x><\x0e&\x13\xd3\x0a\x00\x88\xa2\x04\xfb7\xe5\x00;\xdb\x05s\x1d\x950:\xed\x85m\xb9V\x1e\x89\xda\xbbo\x1d\xee\xb6\xca\xf5F 8@\x0a\x02x\x11\xa0\xef\xd7D(\x02\xda\x00\xbb7\xac\xc5\x08t\x81t\xa3\x1a\xe6}\x01p\xb9}\xb0\x11#A9q\xe2\xde\xfb\xf1\xa9\xe6\x8a\xa2x!T\x00\xaf\xc6&\x81E\x01\xd8\xb9.\x1bN\xdd\xff\x04\x9d\xfd#\xc1;%J\x0e\x89\xe7\x84\xb5\xb1\x07\xe6[lYxv\xda0\xc0\xbb\xdf\xd3\x08\xb04\x07I8\x0e\x14ZWCq\x86E\xb3#LdXh?\x92\x87_]\x86\x01>:f\x96\x05\xe0\x0e!I|\xcf\x14\xe7h\xb3\xa3(\x13\xcc\x97\x9e\x80%\xc9\x1c\xbeN\x16\xf7\x0d\x13s\xe1\xeb)\xd6z=*\x80/\xce\xd9\xf0\xc9\xe5\xa4\xb4)y\xa0\xed\xf9\x19\xc0\xce?\x02\xe9z\x95\xe6=\xdc7:\xdfh&\xaa\x0a\xe0\xdb\xf8\x5cL\x00-\x98\xd2\x1c+\xb0s\xdd\xf0\xa2\xae\x0c\xdc>\x01\x04Q\xd4\xf6\x8d;o\x1dX\xb2*\x08\x15\xc0\xe0\xa4[\x15b=\x00tmI\xf6\x1a\x8c@7\xbc>\xb3\x0b<~\x01\x02bT\xdfPA\x04\x01\xae @\x9b\x0dF\xfeyt\x0e\xad\x16uN\x11\xf8paO\x18@\xafo\xa8\x00~\xcfxcLEKE\x1dPe\x10\xc0\xe7\x8b{\xf9+\x90\x01\xf4\xf8\x86\x0a\xe0\xef\xfc\x02Hq\x12P\xd5\xe4\xa7\xa5p\x80\xaf\x97\xf7\xa9\x00\xf4\xf8\x86\x0a`\xd2\xe3S\x95\x90>\x00\x06\xd9\xa9\xc9\x08\xd0\x05?\xea\x0f\x22@\x00\x93P\x8a\xe9\x1b\xa9\x0dv\xf0\xb6\x1d\xc9S\x01\xcc\xfb\x8c\xcd\xaci\xc9f\x0e0|\xf5\x10\x0f5\x01\xc4\xf2\x0d\x0b\x02,\x84\x01BU\xf0\xf0\x9bC\xd3\x8a\x97\x93<\xc8\xb1\xd2B\x0e0\x8a\xd9.\x03(\xaf\x09\x7fW\xf8\x86\x0a@\x9e\x0d\xbb\xbf\xff\x013\x01\xe8$\xa0\xfe\xa8\xdc\x8en-\xe0\x00\x0e\x5c\xd4\xccE\x00D\x02\x93\xc87\x22\x00\xecNZ\x90\x10\x00\x0b\xbdW}\x00\xc1\x10\xcb\x00.|\x88\xb9\xc5\xe8\x00\xb2o\xa8\x00pI\xe6\x8cgI\xa6%\xaa\x82)\xcc\xa3h\x11\x90E\xbe!O^\x1c\xa0\xa0\xd9>\xe4\x16\x98\xd5O7\xc6\xa8\x02\xfa\x99\xca,\x10\x10\x82Sd(c6\x17X\xe1\xcd\xe92^\x05\xd1\xc6\x97}C\x09\x90IP\xd8\xb2@\x9f\x0b'a\x0d\xf7\xceb\xc4\xa8l#\xe5\x17\xc4\xa8f&\xfb\x86\x12 \x09\xdb\x1al\xc9:\xa3\x9d\x855<\xe0n\xa9\x08>-\x0d\xc7\x93\x11B\xe5'E]\xd9\xcb\xbe\xa1\x04\x88W\xe1\xb2u\xfbE\x9e\x88\xf4T\xf1\xf8\x17\xf9\xc6\x8a\x00$\xe2\x1b\xca*\x88\x1b`%|Ci\xc5\x06\x00\x12\xf7\x8d\xd4\x86\x1e\x04\xb0\x19\x03X\x09\xdf\xc8h\xea\x85\xd9k\x15\xc6\x00\xd07\x06\xd17\xd2\xf5\xf8\xc6\x12a\xb4\x92p\xa1\x9a\xb28\xdd\xef\xea8Ym\x04 ^\xdf\xd0\x12\xfd\x97\x9c\xc06f\xa4\x83x}CK\x14\xb6\x05l\xde\xff\xad\xd3\x94\xae\x1f\x87QR\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x04#\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0w=\xf8\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x03\xa9IDATx\xda\xb4\x95\x7fLUe\x18\xc7?\xef9\x5c.p\x11\x89y\x85\x140\x97mV\x06l\xc0\xfa5\xd0p\x19\xba\x98k\xe2\xa6\xaeFmmm\xe5j\xd3\xb5T\x5c\x7f\xb5\xe8\x1f*\xdbjc\xabV\x22\xd75\x9b& 
K\xc2\xae\x0a7\x07\xc6\x1fHQ\x9bKI2\x05\x92\x1f^\xee\xbd\xe7^8\xf7\xe9\x0f\xce\xbd^\x18\xc8\xa5\xe5w\xfb\xee\xec<\xefs\xbe\xdf\xe7=\xe7y\xde\xa3D\x84\x85\xa0\x94\x9aq\x0bh1T\x80\x00\xe1\x18FE\x13X\x1c\x14\xa0\x03\x89\xab\xee\xe7!G\x12;t\x8dg\x95\xe21\x11z\x04Z\xc2a\xea\xfb\xaer%b\xa2bv\xb0\x0f\xa8\x01z\x80\xdd@\xc7\xac\x1dD\xc4\x93W:y8=\x95\x83\x9b\xd7\xa7e\x17\xe7\xa5dfg\xda\xb2\xc6\xbcf\xdf\xf9\x8b>\x7f\xcb\xd9\xf1?\xcd0\xfb#&ZLu\x95\xb4\x1c\x87?\xfa\xf2\x81v\xe0\x89Y\xd5k@\x22\x90\x9a\x9a\xc2\xae\xf2\xf5i\xb9\xa5\xc5\xa9NO\xf7\x84\xf9\xe6{\x7f\x7f\xd4\xda\xe1M+)rdn\xd9\xb0t\x95\xa6\xf1\x92\x95O\xacA\xe1)\x8f\x07\xd7\x9aG\xc1\xdd\x0ap\x01\xd8\xe8\xba\xf3\xfe5\xc0\x0e,I\xd0)+)r$\xb7\xfd\xe4\x0d5\xb9'\xbe\xee\xfe\xcd\xfc\xee\xe4\x19\xaf\xeb\x87\x0e\xefTI\x91\xc3\xa1`\xcb\x5c\x06Q\xb8\xca\x9eC\x1a\x8f\x03\xb4\xed\x12)\x9de\xe0\xd05\xd6f9m\xb9]=\xbe\x95\xff\x8c\xd1\x0c\xf8\x06Gh\xef\xec\xf1e/\xcbHxP)\xf2\xefj\x00pt\xeb6\x8c\xa3\x87\x01\xce\x85B\xa1\xd2\xc8\xc7\x05\x92\x95\xa6\xdb4E\x92\xdf\x08\xdb\xbd~F\x80\xa9@\x10\xc3o\x84\xed\x9a\x22Iiz\xa2\x95\xafb?\xb2\x9c\xaa\xde\xcb\xf8\xfb\x1f\xce0\xaa\xf8\xec\x10\x8e\xd7v\xd3\xdb\xdb[YPPp\xa9\xbc\x98\xea\x8c%T-\xd4n\x93&\x1f\x1c;G\xf5\x82m\xda\xf4\xfa[l\xf4\x19\xac\xa8z\xe5\xdb\xda\xda\xda7\xba\x1a\xf7V\xbd\xfc\xea\xe3<\x90\x932\xef3\xfd\x03~\xbe\xa8\xeb\xdc\x07\xbc\x1b\xd7\x1c|\xff\xf6;\xe4\xfdu\x9d\xa7v\xec\xfc\xd4s\x02Vd\xda\x08\x85&\x01\xf0\x5c\xf4q\xf6\xc2\x18\x1b\x9eL\xe7\xe9b\x070\xbd>i\x02\xa0\xc7e @\xc7\xa1O\xc8\x19\x1d%4\x09F \x18]\xbb\xf4{\x80\xb6\x8e\x1bd\xdcg\xa7p\xdd\x1d9\xcb_\x8b{\x92\x05\xe8>\x5c\xcfD\x1e\x0c\x0f\x8eF\xe3\xcb3\xec\x0c\xdf2X\x9e\xa1\xcf\x88O\x04\xfe\xc3Q\xa1\x01\xb7\x030<8\x12\x8d9\x1d:;\xb7\xae\xc6\xe9\x18bx\xd0\x8c\xc6o/\xd6@\xb7\xe8\x1c\x85\xcbW\x03\xe4diQ\x81g\xf2\xfc\x00\x18\x96\xe8\xc0\xcd0F\x90c\x80$\xc4[y\xfa\xb6rJ\xcb6\x91c\xb7q\xfe\xcc\x09\x9a\xbf\xf9q\xfe\x16\x9d\xa2q\x0a\xbe\x04\xccy\x0dr*6MW\xd3\xd4\x8a\x06,\xf3\x06I\xaf\xa84\xea\xb6o/\xeb\xec\xec\x14\xc06kP\xc3\xc0$\xe0\x03F\x81q\xc0\x9cs\x92\x97\x1e\xd8CI\xe3i\xb3\xa4\xe1H\xf4\x8c\x1eju\x93nz\x93\xea\xeb\xeb_\x04\x06\x80k@\x7f\x0c\xafY\xf1\x9b\x96\xb8\x01\x84\x11\x91\x08\x7f\x1e:}R\x9a\x0f\xec\x11\x11\xb9,\x22\xb9\x22r\xa3\xa9(_\x1a@\x8e\x804\x16\x16H0\x18\x14\xb7\xdb\xbd\x06H\x99\x83\xc9\xd6q\xa2[u\x11k\xb0_\xa6\xd1,\x22\xc9\x0d\xd3k\xd5r\xe5Wi\x80(e\xb8_D\xe4\xd6b:/\x82\x1a@\xb9\x94z\xde\xa5\x94\xd5\x0f|\xcc\xeaG\x02i\xeb\xd6F\x93\xae\x7f\xf59@8\xa6\xb0\xbb2\x9e\xa4:\xf9\xa5K\xda_\xd8,}5\x07\xadMJ\xe5\xffi\x90*\x22\x1e\x11\x09X\xd7\xf2x\xc5Ed\xc6?\xf9\x9e@\xe3\x1e\xe3\xdf\x01\x00\xd1\xf6.sL\xa4\xb3?\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02x\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x02\x1aIDATx\xda\xa4S=o\x13A\x10}\xb3\xbbg\x07\x83Q\x08\x04\x0a\x9bD\x14H\xa1\xe0CIDK\x13h)(\x90\x10\xa2\x81\x22\x05\x0d\xff\x00\xd1\xd2\x80\x84\xac\x84\x86\x06\x946\x0dB\xa4AB\xa2L\xe0\xe2\x02d$\x07#$\x9b`\x82}\x96\xef\x9c\xcb\xdd.\xb3w\xce\x17\xd0 \x8ft\xba\xbd\x9d\x99\xf7f\xde\xcc\x911\x06\x83\x98\xc0\x80\xa6\xca\xf33\x8b:\x0a\xaf\x1a\x1d'\x17$$H\xaa2\x918\xc7_\xfb\xa3\xa9\x7fC\x84m\x9f\xb2\xc9\xe7g\x9f\x01[Q\x1a$%V\x9e\xdc8\xcb \x86\xe8\x0f\x00N\x22! 
T\xe6\x13\x13\x9dI\x00\x8c\xd6\x9c\xbc\x89\xb0\xf2\x10\xc6\x10\x9c\xc25L\xce>\xe7\xe6\xfa\xc9\x16d\x1b\xc8\xbe\x94\x82[\xba5!IL\xf1\xfd\xb2J\x1c\xa6\xcb\x8e\x03\xec\x97\x88\xd6_\xf1\xc5\xcb$\x96\x18\xc4\xc4\x1aA\xc7\xef\x97n\x90)\x5c\xe7x\x83\x8e\x1f^\xcc\x1f\xccZ\x00v\xc5>\x843\xc4Gi\x8b\xdca\xd6\xacK\xe5\x9d\x8b\xa0\xd5\xb1,\xec\xe1\x89\xd1}\x08\xa9\x90s\x9c\x12\xeb\xf6 \x05\xa0MK\x87\xa0\x1d \xe9;\xa5Cm\xa5\x82\xd1\xb1\x11\x1c\xbfY\x02\x12\x91M\xfaX\xbf\x93\x81;w\xfb\x98\xb2e\x22\xf2\xf0\xf9\xad\x8b\xee/oW/\xd6c|\xea4\x8e\x14\x8eb\xab\xfa\x88\xab\xd6I\xe9\x16@\xe6\x8a\x08\xc5\x04\x9a\xedpIYf\xbf\xbe\xc6\xca\x02\xd3\xf7\x16\xb8\x9d\xb8\xaf\x96D\xfc\xb5\x04mxU(\x93\x96\x9f\x00k\x06\x18G\xeb\xe3*\xbe\xac\xf7^(\x9e7:\xf5\x1a\x0e\x17y*\xdd*ts\x89CE\x0a\x22\xb3|\xda\xbbk\xac\x031\xd3\xd0!\xfc\xac\xba\xb8\xf3\xf8\xc3b\xd2B\xbb\xf1\x0d\xc5\xe9K\xdcJ\x1d\x94\xcds\xaa\xd8\x1d\xe1>c\x00g\x04~c\x0d\xdf\x1b\xcd\xd7\xab\xf33\x9e\xb2A\xc1F\x0b\xb9S'\x81^\x8b\x071\x8c\xbf6p\xafe\x87\xb1\xb1\xfc\x1e\xb5\x1f\xbd\x85\x13\xa3L\xe6\xce]\xf6x`y\x1d\xc5\xb0Kev6\xe6\xdff\x8b2$\xda\x93w\xdf\x8c\x95\x9f^\xf1l\xe4\x85D\xb1\xff3\xbb\xf7n\xba[\x03\xfe\xce\xbf\x05\x18\x00\xab\xcb\xbf\xa2\xea\xc2C\xa9\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x06\x09\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x05\x8fIDATx\xda\xac\x97Al\x1dG\x19\xc7\x7f\xdf\xcc\xec\xbe\xe7<\xbbu_\x94\xc4\xb5MR\x14\xa4F\xa8\x95JU(*Hp(T\xb4\x8a\xaa\x92\xa6\x88C+W \xd4\x0b\x08\xc4\x01\xa9 P\x84\xa8@\xe2\x00E\xdc\x90\xca\x81\x03\xa2IK\x09\xcd\x05\x82D)\x88Cz\x01\xd2\x02i]'\x8a\x93\xb8u\x12\xdb\xa9\xbdowv\xe6\xe3\xb0\x1b\xf7\xbdg\xfb\xd9I\xf8\xa4Y=i\xf7\xed\xfc\xf7\xfb\xfe\xdfof\x04pc\x87^\xfe\xedJ\x90\x87|P@\x19\x14\xd6\x08)\xf1\xa5K\xdf\xdf\xff(P\xf2\x7f\x88\x91\x9b\xbe{L\xaf%\xfex\xea]m}\xeb\x85\x17\x00{\xa3\x93;\xa0Y}9\xfc\xee\xdf\x17\x10\xa0\xbat\x85B\x04\x1e\xde7\x86|\xed\x08\xfa\xec\x01\x9e\x7f\xf2\x93\x8f\x1c\xe0\xc5\xe3\xd9\x8f\x1e\xb9\xffF2a\xaa\xe9*\x01\xa9\x15\x1a\xce\xd0\xb4\xb6g4\x9c!\xb5\xb5*#\xbc}y\x85\xcf\xdd\xbe\x83\xe7\xa7\xee\xfb\xd4\xd0\xd3G\xcf\x00\xc9\x8d\x08X\x8d\xc4\x18\x12kH\xac\xf4\x0dCj\xeaGE\xd8\xd1J9\xf9\xce\x15\x1e\xda\xb7\x93#O\xdc{k\xeb\xdbGg\xaeW\x84\xa1\xcbv\xce\x0a\xcel<\xae\x96c8u\xec\x19\x1db\xba\xce\xc4KS\x1f\x1fo\x7f\xef\xd8\xec\xf5\x88p\xbd\x19\x10Dd=\x0b\x10\xeb\xdfS\x9f\xf8 
\xf2\xf5\x17+\x9f\xa8BT\xf4\xd9\x03\x14Qw\x00-`\xe1\xba\x058c\x11Y\xebA\x80(pv1\xe3\xb9\x83w\xf1\xdc\xc1\xbb\xd6\xdc/+#\xa77\x94\x01kdC\x01\x06\x98[\xce\xb9\xb0\x9c\xaf\xde\xd7\xfar\xcf\xc4(E\x88\xb4\xbe\xf3\xf2\x9c\x02\xaa\xda\xcb\x0d#\xef\x5c:\xf4\xe0$\xe0\x07\x97\xa0v\xbal\xa8Wz0\xd5=Q\xf4\x91\xf7~\xb8\x7f\xdd\x7f\x1d\x7fs~\xe7\xc3\xe5\xd1\x99\xe5\x1f\xec\xbf\xad_D_\x096\x13\xd0\xef\x0d\xe9V\xc3\x9fg.\xb2\x5c\x04B\x8ck\xb9\xf1\xf8\xbd\xe3\x07\xf4\xe8L\xf6L\xaf\x885m\xe8\x8c\xc1\x8aT\x80\x88\x11B@bDT\xb1\x22\xb8\xfa\x99\xab#YmOhX\xa1\xe9dcn<\xfe\xb1\xf1\xa1\xa7{[\xb6\x02Q\x8cx\xef\xd1\xd2C\xe9\x89eAYt\xf0y\x87\xa2\x93Qt2\x82\xcf\x89eA\xf4E\xf5\x5c(11`\xae\x16E\xa9\x19b6\xe3\xc6x77\x1c`\x09%Y\x96Qd+DUb\x8c\x84\xb2\xc4\xfb\x82\x10\x02\xc6\x18\x924\xc5Y\x87\xb1\x16\x11\xc1\x18\x83K\x1c\x8d$%\xc6\xb8\xea!\x17\xaa/\x8f]=\xbc\x1e7\x1es\xc7f/\x1dzp\xc2\x01V}\xce\xe2\xe2\x22\xef]\xb9B\x0c\x91\xa2(\xd8;\xd6f\x05e|\xf78\xbf\xf8\xd5a>r\xcf\xdd8\xe7*N\x88\xe0\x92\x84fs\x88dDV\x05t\x03k\xab\xdcp\xf7\xedk\x7f\xe5\xc8\x03\xa7\x98\xfe\xdb[\x98\xec]\xc2\xc8^\xee\xd8\xffU\xce\xff\xf7\x9f\x18\x0c\xa7g\xce\xf0\xa5\xa7\xbe\xcc\xbf\xfet\x18s\xfa8\xe5\xb6q\xf6lO\xf9\xf9o^\xe1\xb1o\xfe\x04\x89Cu7(\xceX\x9cQ\x04\xdd\x94\x1b!V\xdc0Qe\xcf\xd8\x03O1\xc1Y\xee\xfe@\x93\xfc\xf2YN^\x80\x95d\x92K\xec\xe2\xb2\x1d\xe3\xd4E8\x7f\xfaM>z\xdb\x10\xb7\xeaY&?{\x90\xe5\xe5\x15\x9c(\x89HO\x17\xd9z8+\xcc-\xe7\x9c8\xb7\xc0k\xf58qn\x81\x13\xb3\x0b\xf4\xe0_\x91%\x16Oqz\xfa$\x17\x97&\x98l\xa4\xbc\xf1\xeb)b\xe8`k'\x9f/-wN4\xf8\xc7\xf4\x0aK\xf3\xb3\xec\x9d\xfd\x0f!m\xf0\xea\x99\x8b\xb4\x16\x02_l\xdf\xdcS\x02\xd9\x027\xba9 \x94\x81\xa4\xd1\x02;B\xe6s\xee\xdc\x99aD\x08\xaa\x18@\xc5s\xa5\x04u#\xb8\xb4\x05!@T\x1c\x82\xe9\xca@\xb2F\xc0\x00n\xf4\x80\xc8g\x94\x11\xd2X\x12\xd5p9n[\x87\x81 \x94\x84\x08\xf8\x1c\xa2\x12\xfb\xb6o\xceVl\x90\x0d\xb6u\xba\x0e\xe4\x1c*\x90\xe7h\x00\x1fB\xe5\xd4\x8d\x22\x1aB\x00\xf5\x05\xa9\x15\xee\xdf\xbb\x93v\xbb\xbd*\xd1\xc9\xe0\x0c\xacQ\x038\x04\xf0\x05\xa5\x82\x8d\x0c\x14 \xa2\x04\x05\xefs\xe6\x162>\xf4\xcc\x1f\xa09\x02\xe9\x10\xb7O\x8c\x22\x02V@\xcc\xa0\xd5\xa4/\x03\x06\x091_\xc6\xab\x90\xc4\xb0\xaeQ\xde\x17P\xf5\xb7\xcfsv\xdd\x94r\xe2\x1b\x9ff\xb4}\x0b\xad\xd60I\x92\x10b\x85\xeb\x01\xf3#}\xf7\x9c\x08\xa5/s4F\xca\x10\xb7\x90\x01!\xf8\x1c\xd5\xf7\x99\x9fZ\xc1\x18!D\xc5Y\x19\xb8\xb3\x97>\x05N\x90\xe0\xb3\x82\x80\x10\xca8\xf0\x5c \x22\x84\x10\xf1E\x011\x82\x08\x82\xa9\xb0l\x0c\xce\x5c\xc7\xb6\x5c\x0dx\x9f\x11#D\x1d\xecA\x11\x08j\xc8\xf3\x1c\x05^\x99\x99gd4\xb0m\xdb\x0a\xd6%[\xf2\x9d\xaa\xf2\xf9\x0f\x8f\xaf\x9a\xc1\x99(\xe4YFP\xc1\x97~\x93W\x08Q\x94\xb2\xe8\x80F\x8cT5\xb4\xdd\x9b\xd6A\x02\x14B\x9f\x19\x1d\x06\xf2\xa2\x83\xd6\xab\xe0\xc0\xe9\x05\xb4\x84<\xef\xa0u\xa9\xa2\xea\xd55f\x0b\x02tM?8\xd4P\xe4\x19Q\xa1:\x1an\xe2\x01\x11\x0a\x9fA\x84\xcf\xd4\x1c\x18\x1e\xae\xba\xe0Z\x22\xb1BV\x91PC\x96e\xe4\xaa\xb8b\xf3\x13\x96\x0fJ\xd6\xc99\xb7\xd4\xe1\x8e\x9f\xbe\x0a\x8d!$i\x80q[\x9bY\x84\xc4\x08\xcd|\xe1\xafK\xa0\xee\xe6Vc\xd7\xee\xed\x0d\x96\x96\xda\xd8-\xb88\xc6\xc8\xee\xed)\x93\xcd\x5c;?\x9bz\x14\xe8\xd4{\xbc\xb8\xd5\xaf\xcf\xa0\x5c\x82y\xa0\xe3\xa6\xe7\x16\x7f\xff\xe3_\xfe\xe5\x0bs\x0b\xf9\xbc\x91\xcd_\xa2*2:\xdc\xb8\xe5\xef\xaf\x9f?\x0c\xbcV\x1fL\xc3\xb5\x08\xa8\x1b\xa2\x03d\xff\x1b\x00}@\xd9\x897\xfa\xfe6\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x06\x00\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 
\x08\x06\x00\x00\x00szz\xf4\x00\x00\x05\xc7IDATx\xda\xa5\x97\x7fLSW\x14\xc7\xbf\xaf\x85\x8ah\x11E@\xfc\x8d\x8b\xb8\xe9~\xc5i\x8cA[\x02\xea\x18\xa2\x80\xdamq\xce0\x7fMD(81[4\x1ad\x989c\xc4\xb19\x9d\x9a\xb1\xe9\xfe\xd8t\xd6\x19\x10\xcd\xe66\x8dl\xa2[b\xdc\xe6/\x14\xb5*\xf2Z\xcb\xf8Q~\x15\xde{;\xef\xb6`[[\xfb\xe2Nr\xdb\xd7\xf7\xce\xbd\xf7\xf3\xbe\xe7\xdcso9\x04\xb6\xa0\xd2\xd2R\x93J\xa5J\x95$\x89\xdd\xe08\x0e\xa2(\x96\xe7\xe4\xe4d\xd0\xcfn\x05c\xf85N\x81\x8f\xb6\xa4\xa4\xa4\xd9`0\xc8\x93\xb2\x1b\x04\x83\xc3\x87\x0e\xc1\x98\x97\x17F?[\xfe/@P\xd9\xfb3\x8fr\x90fK\xae\x09\x00\x09*\xd1\x81\xc61\x99H\x98>\x0d\x82\xd0EM\x80(+\xe0RA\x86\xe0TjT\xfd^\x8d\xf0\xda2\x88*M\xef\xfbp\xf4L\x02W\x91\xf9\xd1\x8f\xe9\x81\x14\x92{h\xcb\xd6%6g,\xca\x81@\x00\xd4\x91$&*\xea\xb7\xef\xb4\x0d\x0b\xe6\xa5\xe1\xcem3\x83\xf2\xee:j\xf4H\x1c>r\x0c\xcb\xf4\x83\xc8;\x88\xb1\xd1\x8b@M\x00\xa6\x83\xa5\xc8\xfc\xf8\xe7\x80\x0a\xc9\x00\x91\xfb\xdf\xd3[\xde*\xd8\x83k\xe5\x9b\x89>\x88\xc5X\x068n\xd7c\xe5\xb2L4\xd8l>;\x0f\x8a\x88\xc0\xee}_!\xa5\xff\xaf.\x00\x09\x92\xd8\x8dq\xa9\x1b\xf1\xcd\xb6w\xb1t\xfb\xe9(r\xb3\x06\x02\x88\xda\x9f\xaf\xe7\xdf,\xd8\x89\xeb\xe5\xdb\xc0\xa95\x0c@\xcd\x09\xf8\xdb1\x0e\xf7\xbb\xa2\xc0I\x82\xcf\xce\x12\xa7\xc6\xb0`\x0b\x9e\xd7\x5c\x83 \xa9\x9d\x00\x82\x03q\xa9\x05\xf8v[.\x96\xec8\x13Mn\x96\x80\x00\xfb\xd6\xe8\xf87\x8c\xdbq\xfdd\x09T\xea`9\x88LJ\x15\x04\xfa\x16\xd9\xb5g'\xa97{Ip\xf2`1\xa7F~\x94/q\xb3\x8c8\x5cb\xc4\xdb;\xce)\x03\xd8\x9b\xaf\xe3\x0d\xd9[P\xf3\xd3.\x02\xd0\x80%\x81\xdbd\xee\xce\xf2t\x1a\xae\x1bjI\xe8}\xf6\xc8CV\xa0\x1bcgf\xc3\xb4s\x0d\xe6\xed\xbc\xa4\x0c\xe0\x8b<\x1d\xbf \xab\x105\xa7\xf6\xba\x14\xf0\xbd:Id\x84p\x0e|\xd0\x90\x84\x1f\x9a\xe3\x00\xd1;\xc1]\xabD\xa2\xd5\xa4\xa6<\xa20jT\x22BT\xb04\x14\xa6\x0c'\x87.\xdf\x0a\x18u|\xfa\xf2\xf5\xb8q\xe6KZ^\xfe\x01\x829g.L4\xaf\x80T\xa4\x83R;u\xe3!\xd2\xca\xce\xd5\xb5\x16\xcf\x19\xed\x0d\xe1T W\xcf\xcf]\xb2\x167\xcf\x1e\xf4\x0b\xc0\xb9\x01\xbcb^\x0a\xa98\x11\xc7\xae\xd6;s\xc1\xdb\x9dD\x90+J\xda\xb3C\xc0\xe5~\x0f\xe9\x93\xf9\xa8\xbcf\xc5\xfc\xaf\xab\xeb\xda\xb7xB0\x80=\xa4@\xeab#j\xab\xbe#\x80 \xff\x00\xae\x9a2\xf9\xfe2\x02H\xc2\x89\x1a\x1e*\xf2\xe5\xbc\x08\xe4d\x14(\x14\xaf\x8d\x8d\x06\x97w\x04\xb5\x9b\x92\x11;0\x14\x15W-0\x1c8\xef\x01\xe1\x04\xc8\xd5\xf1\xc9\x0b\xb3p\xbb\xda\xa4\x08`J\xddr\x06p\xea\xa6\x95\xfc9_\x02P\xd9\x96\x90\xf4L$\xb8|\x13Z\xb6\xce\xc1\x9d\xc6vL\x88\xd22%\x0c\x07\xaa{\xc3\xc1\x00vS\x08f\x19\x96\xc0\xfcG9+D\x81\x00\xa6\xd6\xaf`\x00\xa7o?t)\xe0\x1b@\x1f;\x98\x140A*\xc9\x80\xdd\xd1\x0dK\xab\x03cH\x099'^?x\xc1J\x899\xcc\x09\x90\xa3\xe3\x93\xd2\x17\xe1\xee\xc5\x93\x8a\x00\xe2-Y,\x07~3\xdbX\xd1\xf2\x07\x10?*\x02\xef\x1c\xba\x88\xb2\xaa[\xce\x01\xe4\x15B\xf7\xe5\x9c\xd0n<\x0e{\xd1\xec\x81N\x80\xd5z>a\xae\x01w\xff\xfa\x85m0\xbe6Iw\x00\x9du\x15\x038\x7f\xaf\x91\xb1\xfaZ34\x0f\x86j\xfb`\xf8\x80\xbe>WF\xe8\x86\x0a\xb4\x17\xa7F3\x80\xcfW\xeb\xf8\xe9\xc9\xe9\xb8\x7f\xe5\xac\x22\x80\x04[6\x03\xf8\xb3\xae\xc9/\x80\xc4^\x98\xd5F\xb7\xaa\xe9\xfc\x984,\x1c}\x09\xa0\xa3\x07`\x17)0uF2\xea\xaf_`[i \x80\xc4\x86\xd5\x0c\xe0\x12\xdf\xdc\xfb\xcc\x9f\xb9\x17\xf1\x9e\x03\xcdKC\x06x\x02|\x96\xad\xe7\xa7\xe8\x13\xc1\xd7^t)\x80'\x02\xcch\xcca\x00\x97\xad-\x01\x01|\xc1\x8c\x8f\xd4z)\xb0J\xc7O\x8c\x9f\x06\xeb\x9d\x7f\x14)0\xab\xd9\xc8\x00jl\xad\x1e\x12+\x01\x90}\xc7F\xf4\xf3R`\x95\x9e\x7fq\xf2$4<\xb8A1U\x05T \xb9%\x8f\x01\xdc\xfa\xb7M\xe1\xd4\x9e&\x17\xa5\xbe\xeb\x09`\x8b\x1b\xc0\xf8\x97_@\x93\xc5\xcc\xb6\xe2@\x00)m\xf9\x90>L\xc4\xbd\xa6\xf6\xc7\xceI\x81L\x1eG^\x19\x1e\x00\x9f\x12@\xdc\x848\xd8m\xbc\xdf\x8d\xc8\x1d 
\xb5c\x0d\x03\xa8\xb7w\xf4\x1c\x11\x95\x03\xd0@C\xfa\x87x\x01d%\xf0\xb1q\xa3\xd0\xdeb\xf3\x1bQvzu\x01\xa4u\xaee\x00\xb66Gof+\x07\xe0\x10\x11\xaa\xf1\x02X\xa9\xe7G\x8c\x89Ag\x9b\xfd\x89\xd2\xf5(\x90\xdeU\xc0\x00\xec\x0eA\xd9\xac^\xd6_\xa3\xf6\x04(]\x99\xc0\xc7\x8c\x08\x87\xd8\xe5\xff\x04\xcd\x14\x90\x9c\xbbh\x86\xb0\x8e\xce\x03\x898r\xb9\xceg)\xf6g=\xc5i\xde\xf8\xa1\x1e\xab \xb2x\xf1TSX\xa8&\xfe\x89\xd2Q\xf7>b'\xbb^\x11U\x0c\xb1(\x09G\xaf<\xa0\x83\x0f\xe7/m\x1e\x07\x90\x8f\x8c\xf4\x91\xfe\x5c\x8cG)\xd6R\x1bIm\xb0\xfc\x92\x0a\xc6\x09\xd6l8Q\xd9Y\xf4*\x03\xe0\x5cqU\x06\xe0,\xcdN\x80\xe3\x040;\x9asM*\xef\x18!PVS\x06\x86m\xac\xbc\xdaT\x98\xacPx\xdf6`S%\x9a7\xa7D+\x0d\x9f\xbbE\xc5\x14V\xd4\xb4\x0a\x5cX\x97\xf8\xe8\xaf\x9ab#\xb5\x82\xe9\x10\x13\xd2\xd9Xe\xd9\xba0\xe3i\x00\xc2\xa9\xc5\xcaJ(T\xcc\x97\xc9\xd9\xfe\x90\x9a\xf9i\x06\xa0S+\xfaQ\xd3<\xe5\xe4\xb2\xc9\xb2uPk\xff\x0f#c\x7f\xae*X\xbc\xd3\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x03\x04\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0w=\xf8\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x02\x8aIDATx\xda\x94\xd6=\xaceS\x14\x07\xf0\xdf:\xf7\x18\xcc\x0b\x83Nt\x18\x05QH\xa8dB\x88\x104\xa2y\x89A\xa1\x19\x89\x02\xd10\x8djd\xa2\x9c\x99\xc4G^\x8c\x8f'\x11\x12\x12\x84hPh|\xc5\x8bF\xa21\xc1\xcb\x14\x12DL\xcc\xcc{gi\xd6y\xd9\xb9\xee\xbd\xee\xac\xe6\xae\xbd\xef\xca\xff\xbf\xd7Z\xfb\xbf\xd7\x89\x8d\x8d\x0dKXw\xf9\xd3\x07\xa2\xfc\xc4\xe8o\xe3*\xbc\x8d\x15\xec\xc7W\x98\x8c\xb1}D\xec\x8f\x88k\xe6!g\xe6/\x99y\xbc\xc0\xa2\x01\x1f\xed>\xdcP\xfej\x11\xec\xc4\xf4\x11q\x0c\x17\xcf#\x88\x88m|\x88_\xd1ck*d\xd2\x867\x99%t3N\xf4\x1f\x8e\xc6\x1fp\x19n\xc1\x9e\xa6d\xa6\xfc\x0bq3V\xfa\xcc|\x14WN\x05\xb6\xe0'3\xf3\xb7\xf2{\xbc\x89;\xf15\xee\xc2\xdfM\xfc?8\xafzr7\xde\xef3s\x1d\xbb\x16dp\xa62U\x04\xd7\x95\x7f#\xde-\xa2\xd1.\xc1\xab\x05\x0ew\xf4]\xd7}\x86\xeb\x17\x10\x9c\x18\x86\xe16\xfc\x89Sx\x0ao\x14\xd9\xbe*\xc5h\x8f\xe0\xfcf\xfdl\x8f\x9b\xb0{\x01\xc1\xa5\x11\xb1\x07\x7fTC\xdf*\xf0\xe3\xf5\xdb5\xb1-\xf8A\x1c\xee\xf1-\xae]@\xf0sf\xfe\xd5\x5c\xd1\x09\xd6\xcb\x7f\xa5H\xa6\xed \x0e!\xfaa\x18\xf6-\xd9\x83I\xd3\xf8I\x95\xc9\x0c\x92\x11\xbcC\xf4\x11\xb1\x8a\xbds4 373\xf3\xf5Fh\xd3$\x89\x97\xebj>\x83\xe7\x0a\xbc\x1b\x95\xfcBs\xa7g\x0a-3?n\x84f\x8ad\x1d_\xe2\x02|_{;\xda\xe9\xa6\x9a4\x87\xe3\xff\xb4(pv\xd6\x1f\xe7\x22\xb4\xc9\xf43Ue{\x08/Vv\x8f\xe3X\x9bE\x9f\x99\xeb\x11\xb1+3\xe7\xf5\xe0\xcc\x8cLG\xf0\x87\xb1\xd6\x90\x1f-\xe0\xa3\xe3\xde\x8e\xd0\xe6\x95!\x22N\x0c\xc3p{\xa3\x83\xf6\xe4k32;\xd2\x90u\xcb\x0a\xed\x22\xfc\xbe\xe0\xe4\xe6\x91,%4\x8cB\xdb\xae\xa1\xd2\x82o5\xb7k\xbb\xd9?\x82\xads\x11\x9a\x9aZ\xcf7 \x9f\xe2;<Q\xeb5\x5c\x81{j}\xb8\x8f\x88\xd5\x88\xd8;\xab\xc9e\xa3\xd0\xce\xe24>\xa9\xfa\x7f\x83\xfb\xf1`\x13{\x12O\xe2\x03\xdc\x8a\x9fv\x84\xb6\xe0\xaeog\xe6G\xd8\xac\x1e\x1c\xc0K\xf8\xa1\xfa\xb2\xd2\xc4\xee\xae\xf9po=\xe7?\xf6K\x0a\xad}\x22N\xe3\x8b9\x13o\xb4S\xf8|\xd4\xc1c\xb8z\x81\xd06\xd1\x0a\xad\xd5\xc4V\x8dQs\xe6t\xf6\x99\xf9\xda2\x9f-E6\xeb\x10\xef\xe1\x81z\x8b\xde\x99\x9e\xd3\xff\x0e\x00M\x1f\xd3\xc2\xe83+_\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x06\x0f\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 
cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x05\x95IDATx\xda\xac\x97\xdbo]G\x15\x87\xbf53{\x9f\xe3[\xdc\xf8\x90&qM\x1a\xd4J@\xa4H\xa9\x04%\xf4!54\xb4Am\x10\xbd\xa0\x16\xa8T\x9e\x10\x12\xe2\xf2\x96\x97\xaa\x8f\x8d*\xf8\x03x\xe2R\xa9\x82J\x85b\x81\xa0\xb4N\xd2\xa4*B\x22 \xd4\x0b\x08(\xaeS\x92\xbai\x9c\x8b\x1d\xdb\xfb2{f\xf1pNN\xce\xf1\xe5\xd8I\x18i\x1e\xb6\xb6f\xe6\x9b5\xbf\xf5\x9b5\x02\xb8#\x07\x86\x7fm\xfc\xd2\xfd\x1a<\xaa\xf4l\x22\x96\x90\xa4\x13_x%{\x04\xa8\xf8?\xb4\xa1c\xf7$z-\xed\xfc\x9f\xff\xa0\xbf\xdbg~\x05\xd8\x1b]\xdc\x01u\x0d\x1e\x80s\xc7_\x04\x11X\x1e\x05\x01bd\xcb\xf8C\xfc\xf2n\xe1\xe1\xe3\xca\x9d\xcfL<\xf8\xd2\xa1\x83G\x0e\x9c`\xff\x8dD\xc2\x00\xd2\x0e\xbbM\x11W\xc3\xd4\xeb]]\x5c\x0dl\xdadQ\xc8\xde\x9f\xa2\xb1\xf7\x01>ux\xe2\xee\xc9}\xbc\x07$7\x02p\xf5#M0.ALw7.\xc1\xd4Z\x00\x02\xc9\xf0\x16\x16\xfe\xf3&\x8d\xbb\xbe\xc4\x9eg~\xb3}r\xdcL_/\x84\x01\xda\xc2\x13q\x88KV\xef\xc6\xb5\x8f\xc3\x0d\x0cQ\xdf\xb6\x93\xec\xf4;4\xf6>\xc0\x1d?\xf8\xfd\xe8\xd1{\xfb\xce\x5c\x0f\x84\xeb:\xea$ED\x9a\xdb\xecl\xaa`\x9a\xc1\xfa\xf8\xfe\xaf\xf1\xe2xS'\xaa\xa0\x02\x0f\x1fW(\xcb-\xc0\x00p\xe9\xba\x01l\xe2\x9a[\x5c\x0e\x00\xa0\x91\xfc\xc3\xff\xb2\xeb\xc9\xe7\xd8\xf5\xe4s+~G\x0d\x00\xe9\x8dE\xc0\xd8\x1e\x00\x86r\xf6\x03\xcas3W\xff\xab\x82\xc0\xa6]w\x92\x078:n\xce\x82\xa2\x1df\x22b!M?\xfc\xfc\xcb\xd9\x18\xe0{\x02`\x13\xdaJ[\xcd\x84:\x05\x03]\x0bU\x06>w,\xac:\xee\xc2\xc9\x97o\x9e,\xbf8\xbd\xff\xd5\xb8s9Dw\x04\xac\xeb\x09\xb0\x02\xa8\x13\x06\xb8x\xf2(!_DcX\xe1\x1b{\x0eO\x8cN\x1e:8\xbd\xff\x04]\x10\xddi\x98$\x18\xe7\x10k\xc1\x18\x22B\x00\x22\x82\x8a \xd6b\x9c\xeb\xeeI\xd2&\x90\xb4\x8e$k\xfb\xc6\x9e\xc3\x13\xa3\x93\xfb\xe8JY\x03H\x04\xbc\xf7x\x05\x8fPF%\xaf\x02\x99\xaf\xc8JO\xe6=E\x88\x94Q)\xa3\xe2\x15*\x84 \x165\xb6}>\xc6%\x98$]\xcf7F;}\xc3\x00\xd6+dY\xc6RQ\xb2\x98\xe5,f9\x0bK9s\x0b\x8b\x5c\x9c_\xe0\xd2\xe5E./f,,\xe5,\xe5\x05KyA\x96\x97\x94!\x10\xc5\x12clkH\xac\xbb&\xdfp\x80-\x02\xcc\xcd\xcd\xb10\xb7@\xd4HY\x96|d\xc7\xc7\xd0,gl\xfb6~\xfe\xb3g\xf9\xf4\x1d\xbbq\xce!\x22\x88\x08I\xe2\xe8\xab\xd7\x11\x93\xb4\x01\xc4\xba\xab:\xda\xa0o\xb8\xbb>1\xf2\xcd\xcf>\xf5}\xa6\xfe\xf8,&;\x87\x1f\xba\x8dO\x1e\xfc\x0e3\xffz\x13\x83\xe1\xd4\xf4{<\xf6\xed\xef\xf2\xd6\xd1\x170\xa7\x8eP\xf5\x8f\xb2\xb3\x91\xf2\xa3\xe7_\xe5\x89\xef=M]\x04UEc\xcbG*\x87\x1aY\xd77\xb4\xe5\x1b.\xaa\xdc\xba\xed\xbeoQ<\xff\x14\xdb?:\xcc\x89wO\xf3\xf6\x07PO\xc6(}\x00\x1b)\xce\xc3\xcc\xa9w\xf8\xf2\xce>\xa6\xcf\x9ef\xec\xdeC\x5c\xfe\xf1K\xa8\xb1\x88K\xba\xb3\xc8\xd8V\xba\xf6\xf6\x8d+\x09\xe4\x14\x99g\xee\xdf\x9c\x9az\x9b\xf3\xf3\xb70VK\xf9\xc7/\xbeA\x0c9\xd66\x07\xceT\x96\xdd\xb7\xd4xcj\x89\xf9\xd93\xdcv\xe6\x9f\x18'\x5c\xfa\xdbk08\xc0\xf0\xfd_o\x02\xb8\xa6\x06\xb4\xb5`/\xdf\xe8\xf4\x01\xa1\x0a$\xb5\x01\xb0Cd\xbe`\xf7\xcd\x19F\x84\xa0\x8a\x01T<\x97+P7\x84K\x07 \x04\x88 \xce\xb6\xcf\xba\x13`M#[\x03\x00|F\x15!\x8d\x15Q\x0d\x17c\xff\xaa.(T\x84\x08\xf8\x02bD\xaf\xa8\xffJN\xdb\xa67h\x5c\xc3\xc8TW\xc09T\xa0(\xd0\x00>\x04z\x16\x85\xd1\x10\x02\xa8/!\xad\xd1\xf8\xcc}\x8c\x8c\x8c\xb4\xee\x11\xa0\x95\x82D\xb3\x01\x1b\xbd\x02 
\x80/\xa9\x14l\xa4'\x80\x88\x12\x14\xbc/(\xcf\xbd\xcf+_\xbd\x9d\xa1\x04\xfa,l\xdb~k\xcb\x8dl\xf3J_{\x92\xee\x08\x18$\xc4b\x11\xafB\x12\xc3\xaaB\xe9\x1c\xdb<\x81\x82\xb4\xb1\x95\xf1\xa7\x7f\xc2\xe6\xcd7180@\x92$h\x0cM\x1b\xd7\x1e\x11\xd0e\x00\x22T\xbe*\xd0\x18\xa9B\xdc@\x04\x84\xe0\x0bT\x05\x93\xd4\x10\x9b\x22.E\x8ci^B\xe2\x96\xdd0\xcb\xe7X\x0e\x80\x04\x9f\x95\x04\x84PEV\x96\xc4\xdd\x83C\x88\xf8\xb2\x84\x18\x11\x01c\x05c\x0c\xc6\xb9\x15\xb7\xfb\x86\x0a\x125\xe0}F\x8c\x10\xb5\xb7\x06E \xa8\xa1(\x0a\x14\xb8\xf0\xd7\xe3\xe8\xa6!\x8a\xfe>\x12\xb7\x81'\x826\x8b\x95\xad\xf7|\xa5\xbdOg\xa2Pd\x19A\x05_\xf9u\xa5\x1bE\xa9\xca\x1c4\x22FZ^\xd0t\xc0u\x85/\xda\xf4\x90\x0e-:\x0c\x14e\x8e\xaa^\xbd\xd5zD@+(\x8a\x1cmmAcD\x88\x08\xeb\x171\x1auE28\xd4P\x16\x19Q!\xe8zi(\x04\x11J\x9fA\x84\xc6\xde\x03\x8c\x8c\x8c088H\x92\x5c[E.6\x01<\x0e4dYF\xa1\x8a+\xd7\x7fa\xf9\xa0dyA>{\x86\x93\x8f\xdfN\xbf\x85\x9a\x05'\x1b\x5c\xb8e\xd9\x17\xb4\xefu\xf0\xea\x86\x07j[w4j\xcc\xcf\x8f`7``1Fv4R\xfa7\x0d\xeac\xc7x\x04\xc8[5^\xdc\xf8\xfe}\x05~\x16\xc8\xdd\xd4\xd9\xb9\xdf\xfe\xf0\xa7\xaf=z\xf6R1kd\xfdITEn\x1a\xacm\xfe\xd3\xdfg^\x00\xfe\xd2z\x98\x86k\x03@[\xe0\xd9\xff\x06\x00\xc3\xf3\xde\xb2\x802\x10\xf7\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x05\x5c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0w=\xf8\x00\x00\x05#IDATx\xda\x95T\x0dLSW\x14>\xe7\xbd\x16\x1c\x08\x14Z\x0b\x8f\x8c_\xa1n\x0aR\xa0\xa8l8\xc9\x92\x99\xcc\xf9\x83d\xb2%\x9a8g\x9c[d\x89\x9b\xc6\xa8\xcb\x92%nY4\xce\x01N\xe7@\x81\x90,a\x9b+L\xdd\xccb\x86\x22f\x1a\xa7\xc4L\xc4\xd9\xc8\x9fT\xca\x9fTJK_\xdb\xd7ww\xdfk\x0bE\x9b\xe1N\xf2\x92ss\xcf\xbb\xdf\xf9\xce\xcf\x870\x8b\xd5\xd5\xd7+\xd4\x1a\xcd\xad\x98\xe8\xe8\x85\x04\x00\xe9G\x02w(\x9f\x09\x91=Blaa\xe1\x07\x13\xe2\xb5G\xe7\xa7\xa7\xdb\x83b\xfe\xdb\xf4z\xfd\xdc\xca\xaa\xaa\x89\x02C\x01q\xb9\x5c\xfe\x07\x03?\xa3|\x9e\x1b9\x17\xba{\xba\x91e\x98\x91\xa1\xe1\xe1\xaf\x92\x92\x92\x0e\xa5$'\x93g\x05PUTUYs\xb2\x17\xc3\xb8\xcd\x162&^;\x0f\xda\xae\x5c\x81\xa2\x97\x8b\xc0b\x19x\xfc\xe0\xa1ywzjZ\xad\x042;@N\xb6\xaa\xe2\xe81\xab>'\x87\xd8'\xec\xbe\xa4\xa7\x0d\x095\x8dF\x0dmmmX\x5c\x5cL\xbc\x82\x97\x82X\xc6\x07\x06-\x07\xb9\x84\x84of\x05\xc8\xd5g\xa9\xbe\xae:n\xcd\xd5\xeb\x89\xc3\xee\x90\xe2\xa7\x01\x10e\x00u\x5c,\xb4\x5c\xba\x84\x1c\xc7\xc9\x19\xd3^ \xef\xe2G\x7fij^?+@\x9e~\x91\xeaH\xe5qk~^>8\x1c\x8e\x99\x97(7\x17\x22\x22#\x80\xf6\x07\xbc^\xaf|\x0e\x0b\x0b\x03\xcb\xe0 l\xd8\xb0a\xdd3\x00\xbcH\x01NX\x0d\xf9\x062\xe9t\xced\xe0\xeb\xa1D\x22\xc8\x97\x18(p``\x80\x94\x95\x95\x95\xce\x0e\x90\x9b\xa9:Rq\xcaZ`\xc8'N\x9eG\xfa|P\x89`\xfa\x1c\xe4+\x94\x0a4\x9b\xcd\xcf\x0a0\x9f2\xa8\xb7.)0\x00\xcf\xf3!\xf2\x7f\xda\xa7\x00\xd0\xdf\xdf/\x95h=\x9e>\xfc\xc6\xab\x0b\xb5\xe1\xe7\x95\x11*%%\xe8_$\x04Q\xe0m\xa6G\x93{vU^;]]k|\xb4\xac \x8f\xb8]\x1e:\xeb\xac\x5c\x12\x91x\xa5\x84CB(X\x05\xf6\xf6\xf5\xfa\x18\xe4g\xc4\xac=\xd7\xf0I\xf3\xbc\xf9\xa9@\xdc\x13\xfe \x11xW$6V\x1chk\xbf\xff\xf8\xea\x9b\xfb\x9b\xf6,_j\x90\xde\xc5\x9a\xd6\x03$?u\x05\xe8\x93\x8a@\xf0\xba1\xd0\x80\xe0\x12\xb1\x0a\x16{zz\x02\x00\xd1%\xe7\xea\xf7\x1a\xe3\xb4<\x10\xe7\x90\x8f\x01\x11\x81\xd5\xae\xa2\xa3\xa6$4a\xf8\xabG\x8b+\x96\x17\x90\x83\xbf~\x84Y\x1c\x90N\xcb 
\xac\xc9\xdd\x0b\xe9\xea,\x14DO`\xb9\xa7\x18\xb0,\x8b]\xdd]A\x00u\xbb\x9b\xd4\x1c\x8ds=\x9a\x8ac\xe9\x08Vw^\x87\x0bC\xf7\xa14\xf90\x8c\x85\xdf\x84\xe483\x0c\x8d\x1b!Q\x95\x07\x97M\x1e\xd8\xfaJ\x03dh\xb2A\x10\x85\x19\xc3\xc50\x0c\xdc\xef\xea\xf2\xf5\x80\x96\xa8\xe4\x5c\xedN\xa3\xfay\x96\x96\xc8Ji\xa2?\x0b\x05\x9e\xec\xb8F\xb8\x0c\x15\xdc~\xd0\x8f\x89\xb1J\xc2\xe2?\xc8{l\xf2}N\xcaj\xf8\xf6\xf7\x09<\xb5\xb5\x85\xc8\xf3/\xfd\x18(\x11\xcb\xe0=\x93i\xaa\x07\x14\xa0\xbcI\x93<\x87\x02\xd8 O\x14\x00j\xfe\xfe\x13\xe2\xd2\x09\xad\xf5=p\x0bv\xf9\x93vK\x0a1\x99c\xe1\xdd\xc2\x16\xd0\xc5/\xa6\x0d\x17\xe5\x7f\x02\xed\x90\x18P\x80`\x06\x1f\x18\xb5\xa9\xd109jAQ\x14\x89/H\x81\xf5\xa6\x1bd\xde\xa2q\xb0:;\x90\x91\xb8\xd1FR\xa9!]})\xb0\xad\xa8\x19\x16pz\x0c\xda\xb2))\xa7\x0a\x82\x9dw\xef\x12\x0a\x10`\xb0\xdd\xa8\xe2\x22\xc0|\xf3\x0e>\x17\xff\x82L\x94e\x94\xd8\xd8w\x99\xc4\x1b\xee\x82\xcd\xf3\x10\x19\xf0\x01\x0c\x8f\x01Q\x8e\xee\x82\xd5\x8b\xcai\xed\xdd\xb2\x5c3\xd2\xeb(]OI9\x0e\x0e\x0d\x8d\x97\xef\xd8\xf1\xb6\x0f\xa0n[\x13\xc3\xb8h\xd61\xa0N\xd3\x81\xc8\x9b\xa9\x1f\x06\xd5\xb7Z Jw\x11&\xd1\x09\x8c?M/\xadF\xc7m\x0e\xca\x126BF\x8c\x8a\xea\x02\x07\xfd\xed7l\x9b\xbfh\xfb\x9c\xe6\xe0\xf5\x87\x11\x91\x10koo\xefE4P\x80\xb3\xb5[\x8c\x8e\x91AH)|\x0dA\x18\xa3E\xb7\x02\x83\x0al\x1d\xe9 ?\xdfn\x07\x8f\xc0\xca\xb2\x16\x907\x81\xba\xbb\x96\xbe\x0e\x0b\xd4\x1c:\xec\x11\xe4\xc2\xe93\xcd\x9b\xbe\xbcZ\x9e\x9a\xc2M\x22\xd0a\x91VBDa\xe3\xa6\x8d<\x1a2U%\xc6\xcaR\xa3(\x08\x90\xfc\xd2J\x14\x9d}DJD\x12~\x94\x96@\xda.\x85R\xae5\xfa\xe5\xd9\xb7\x8bt4\xd9H\x1c\xe9\x19\xf5~\xb8\xaf\xe1\xfdX]V\xc3w5\xe7\xddOJ\x8d\x0c\xf0\xfdg\xc5Mqi\x19\xa0\xd1\xe9\xe8$\x8d\x05)\xd9\x13\x22\x10$\x0c\xd24\x8b\x10\x05]\xd7\xda\x07\xca>\xfei\xf3\xce}\xef\xfc\xb1\xe5\xbd*\xf24\x80.v\xdd\xc9\x9d\xb9\xc6\xc4e\x858'*\x9cr\x13\x88\x7fTC\xe9\xcc\x0c\xb9v;\x89\xf7lc\xcb\x8f\xc7\xce\x98\xf6\xdf\xb83\xdc\x17J,1Q='oma\xe2v\x10\xc5p\xa9\x0a\xb3\xa9k\xb0yEb\xbfnz|v\xd9\x8a\xcc\xd6\xea\xfa\x9b\xce\x90\x00o\xadZ\x18\xf9\xc3o\x9d\xb1\xd4W\xfe\x9f\xc7\xfd\xe6Y\xa2O\xb4\xadY\x99i\xff\xf4P\xab\x18*\xe0_\xe0\xfa\x8c\x10\xab\x92)\x19\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x07\xfe\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x07\xa0IDATx\xda\xdcW\x0bLTg\x16\xfe\xee\xbc\x18\x19\xa5PP\xdaUD*\x02#\x8au\x80UDjW\x04\xb4\xa5\x9a\xe9nmR|\xc4M\xb7\x89\xe9\xa6\x1b\x9amH\x8am\x93\xa6l\x84\xed\xb6f\xb7\xa9ib6M[[\xb7\xab\xd5\xb6\xd8ZGmA`\xd4\xc5.\x94\xe2\x08\xc8[^\xf3\xe01\xef\xf7\xec\xf9\xe7\xde; 
\xd2\xednR\x9b\xa6790s\xe7\xdc\xf3}\xff9\xe7?\xdf\x7f\xb9P(\x84\x9f\xf3\xa5x\xe45\x84\x1e\xaa\xc1\xa7\xf4Y6\x97\x83\xe4\x0e\x13P\x05\x82\xc0\xc6e(\xddR\x85\x0f\xe9\xbbtN\x02\xdb_\xbf\xfd\x87\x1f\xe8\x92{|\xc0\xef\xb7\xe9\x90w\x1f\xb4\x9b_\xc1?g\x93`i\x91\x07\x030\x16W#6H\xed\xc0\x01C\x5c\x10\xef\xba\xcd\xa8\xae\x7f\x0d\x93\xff'\xa0\xa4\xb8\x0a\xe5!\x09\xf6\xd0\xc2\xb3X<\xa7\x17\xb0Q\xb0\xfdE\xc7\x11\xc2o\xb4\x1c\x918\x7f\x00\x8f\x91o\x80=@x\x88-\xa9\xc1\xc4\x07O\x9d\x80\xd7\xef\xc5\x95\xde\x06\x5c\xed\xbf\x8c\x86\xaefHC\xa89\xfb<\x9e\x17\x9d\xff\x1bpQ\x15\x9e\x0dJ\xf0\xe7\xfc\x159\xd0,[\x87\xdc\x94|\x84\x88E\x80\xfe\xf8\x89\xc9\x99\x7f\xef\xc6\x8e\xdc\x13x\xeb\xfc\xaf\xd1\xd4\x85\x93\x22\x09F`Qq\x0d\xc6\xca4@t\xd4R$\xc4\xe4`I\xfc&\xd8\xbd.\x1c\xd2Uc\xc8<q\xf2\x5c%v\x92\x9f\xff;\xc0e\xac\xbe\x8b\x13\xe2\xb4\x7f(\xae\xc0|\xe5<\xdc\x1c\xaf\x83\xd1\xfa/L\xd8\x07#N\x1c\xc7\xff\xdf\xb6\xf6(\xfe~\xa1\x0c\xfa.\xd4\xd2\xe2\xb4\xac\x1e\xaa\xd4\x22<\x97\x95\x04\x02\x9d\x82\xd9n\xc0\x80\xe5\x0c\xe6G%`\xbb\xe6\xb709\x06\xd5\xf25c\xab{/\xe08\xf9\xce\xde\xb3\xd2BJi^z\x96\xb6|k\x05L\xb6\xcb\xf8\xba\xff\xd5p\x0c\x8f\xdf\x0a\x09E\x17\x8d\x93\xf0\xf9\xb6\xd8\x9a\xa1\xcd=\x84OZ>N\xeb\xd6\xe1\xd5\xc8.`NR\x19oAr4\x8c\x1eC\xeb\xc0\x9b\xd8\x9b\xbf\x0f\xeb\xd3Vi\x0boo\xa00\xf8\xfa\xf4U\xda\xbd\x05\xfb\xd0z\xf3M\x18\xc6\x8e\x85\x9f\x15\xe3Hd\xd3\xe0\xcc\xd8=\x87\x7f\x00G\xea\xf7\xc1kE\x1d\xeb\xbfp\x06\xd2J\xf0\x9cf\x19\xcf\x90\x13;\x83\xcc\xee1\xc2\xe1\xbd\x89\xc2\xcc]\xb08G\xd4\x8a5&1\x13\x12\x06\xbe.m\xa5v\xf7\x03\xbb\xd0>\xfc6\x8c\xf6v~\x11d2\xd9\xb4I\x088\xc4\xf1\x04\xe4\xf4\xbd\xdf\x0c\x9c\xd7\xa3\xbe\xeeOx\x86\xe2X\x18\x81\xe8\x94<lh\x1b\xc5\xd2\x01\x0bM\x0e9\x10\x1f3\x93\x84\x09N\xcf\x106g\xee\x84\xd91\xaaV\xac\xb6,O\xf9\x15\xb4\xeb\xd4\xe9;w\x15<\x0e\x83\xf1(\x95\xc9\x10\x01\x97\xd3\xf3\xfdc@s7\xd0\xd6\x07\x5c\xea\x00\xb2\xd3\xf8L\xf4\x1b\x09\xbc\x09\xf5\xf5\x07QN\xd1o\x92\xd9\x19\x01iO\x1d.\xde\xd0\xe1xH\x8a\xceI\x05\xb2\x9c~\xa8\x92\x17\x09\x05'\x12\x0e\xaf\x99\xb6\xd3\x086\xa9\xb5\x94\x89\xd1,G\xc0\x9aU^\xfa;t\x98>$R\x1da`\x11\xfc\xd25\xa0\xa5\x13\xc6k:\xbc\xdc\xf07\x1cJ/AYN\x06\xd0=B\xe0\x0d\x11\xf0\x01\xb2\x09\xb6\x0b\x18\x01\xda,p\x92M\x9a:\xd1\xd3s\x01\x9f\xaa\xd2\x91\xea\x95 
99QH\x1f\xc7\xf6\xb3\x05N\xdf\x08\x0a\xd4\xa5\x180\x8d\xa0\xbe\xe3\x0c\x12\x16\x9a\x22\xf5e\xe0\xfao\x81\x9eA\x98u\x07\xf0\xa8\xa5\x0b\xcd\x14\xd3\x96\xb1\x15O\xc7\xa8\x00]\xdd\xed\xe0\x10\x9a\x8a\x11\xf0\x0a$\x1cd\xae\xfeF\xe8\xa33\x90\xe1\xe5\x88\xc4\xbd\xd3\x99p\xfa'\xe0\xf2\x19Q@\xcb\xba>4\x86\xce\xe1),O\xe2k\xabo#\xf0\x01\x98\xcfVb;y\xf7\x93\x8d\xb2]\xa3\xde\x8a?v\x5c\xc7\xa5/\xf9\x9a\xdf\x02.\xb6\xdb\xec\x8b\x91\x8a#[\xba\xa9\x02\x7fY\x99\x89\x07\xf3V\x02>z$HT\xd9t\x8b\x8dN\xc6\xd2\xb8\xcd8\xd6p>\xdc\xd5\x0a\x22p\xa3\x17\xa6\xb3\x07\xb0\x83\x9e\xa3\xca\x83Z\x0d>!N\x92\x803L6>{\xa8\xcd\xa5\x01l\xc1\x1e\x96\x15\xca\xc4\x95\xd8l<fua^\xf2/\xf8r\xb0P.\xff\x14\xdc\x01\x0b\xf2\xd26\xa2{x\x12\x86>k@W\x89\x92Y\xe0\x10\xb2\xeb\x10\x80msM\xd4\xef\x12!F\x22P\xf8\x12\xde\xd0d.\xb9_.\xbb\x0b\x1d\x83V\xa4\x88$\xe8r\xd3\xa0q\xfb\xc7\xb1.u=&\x1c\x93\x12e\xa6=\xa1\xaf\x0e\xef\x09\xe5\x8c\xc4'9v\xa4\x16bu\x97\x0e\xff\x10\x08\xfdO\x04\xa4E\xaf\xe0\x83\xac\x95\x89\xdaG\x0a4\xb8\xfb\x9e\x11\xdc\x18\xb4a\x80:9e\xc9t&\xdcA\x1b\x91\x98@\xf6r\x0dl\x1ek\x86j\xadCMM|b\xc6\xc4\x8cI\xdd\x82\x8a\x8dk\x91\xc6\xa5cu\xcf\x1c\xd3T\xec\x01\xee\xa1jl\xf0{\xa8\xd4>\xc4J\x948\x92\xb9\x22!\xa9\xf4\x815\x18\xb3}\x03G\xc0\x14\x1e*M\x97\xc8\x91\x14!?\x9b\x84\xc1\xcf\xf7\x04\x8b\xa6R,\xc4=\x0b\xb2P\xaboE{\xafy0\xe8\xc6\x932\x05L#\xad\x88J\xd4@\x7f\xf8\xe9\x97\xf1N\xe3\x8bhl&\x11zaZ\x09E\x022\x02?\x15\x08\xe1\xe1\x18\x95\x14*U\x142W$b\xd5\xf2\xc509\x0d\xd4\xf9\x96\xf06\x94Q\xae\xa2\xa2\x80\xe3\x1f\x01\x89\xf1\xc0\x86\xb5T\xe8\x19$\xa2\xe5\xf1X\xa8R\xa3\xbdo\x08\xdfv\x8f\xc1\xe6\xf4`\xd2\x1e\x00;\x0f\x1c\xda\xffL8\xd9\xa7\xae\xbe\x0e=\x918\xf7\xc2\xadr\xbc`[5\xac\xcf>\x95\x0b\xaf7\x18\x0e\xe8\x09\xd8`u\x0f\xc2\x1bt\x85\x1d\xd88U(h\xc8\xe8\xa9\xdb;0\xec\x1a\xc7\xb5U\xb9\xd8\x92G\x99\xf0\xf9\xa6I(\xa4\xf3\x10\xa3HB\x94l\x01\xaf~d\xc1@\x00q\xca\x1c\xb4\x99\x8e@s\xef~\x9cn9\x8c\xa6+\xd3$\xd8\x81D\xc9\x02\x98\x9c\xedp\x13]o\xc0\x17I\x0d\x1b0\x12a\xc80\xf0\xee.\x1a2/\xa2\x8cI\xb3\xb4\x02rr\xda\xb4\x81Hx\x05\x12>\x22lvw\xf2\x1a\xcd\xc9\xe9Y\x19\xc5sa\xc8\xde\x12&\xd42v\x18%k\xca\xe8\xfeQ\xad\xb4\x0a\xa7\xbe\xa8\x84V\x22\xf6\x81/\xe8\xa4\x83\x83O\x94\x80\xf0\x03R\x09\xaf\x0d\x97\x19x'\xc6i\x9f\xb3!s\x83Y]5\xca\x0d\xdf\xa0^\xff5\xf9(\xc9\x97\xfc\xb8\x19\xca\x17\xe0|\xf0\x85\x5c\xfc\x22\x04If\x07\xc0\x9e\xa9Z\x14\xdd\xbf\x03\x9c\x0c\xa5tg\x9ed\xe6\x81\x81\x19[\xb18\xd7Y\x8d?\xab%i\xbe\x8a\x8b\x04\xfe\xb0\xb0\xcfIj`\x22\x1b\xac\xa3\xd1jh\xc5\xc5\xd3:\xca\x02\xf9R\xe3\x85%\x98\x13\xc0Dc\xbbFTD\x1f7\x85sm\x1f\xcf\x92\xe3b\x92\xe3l\x9e\x00\xeb\xf6\xe1!\x02\xbdN\xe3\xb5\x91\xe6\xa9\x01o5\xfe\x15U\xc2\x18\xb5\x08C&2\xac\xfa\x1a\xd04\x7f113#\xc7\xe9\xe6\xc9\xdf\x1d\xcfk\xbfL\xce\xaf<$\x94SN\x04\xfbI\x03/\x9cA\xfdW\x07y9f\xd9^X\xf4\x12N\xca\xa2\x91\xcf\xcep,\xff~\x17z\xadC\xd0]9\x82\xf7\xe9\x8eQX\xf5\xd4\x1c\x93\x8c\xf5\x10\x13\xefDv\xb4\xfb\xe5\x93x\x22f\x09\x8adJ\xa4\xb0\xf7\x1d\x06\xec'\xba{v\xd3g\xfa\xd2GK\xf8\xf23\x12\xa5\x9a\x88(M\x86w\x01\x9b\xfbd\x09B@F\xc3%\x002\xe1\xb0\xb2\xc17\xd7\x14\x9bq\xb4W\x0aD\xd8\xec\xbf\x8b\xd5V\xb8/\xdfz\x10\x9f\xef\xd9\x0btv\xd1\xcako\x01\x9f\x10w\x81K\xb8a\x14\xfa/$\x1c@\xddB\x9a\x83\xdfs\x22\x16\xe5\xdc-\x90V\x0a\x0ba\xb1\xe2XI\xc2+?};\xf8\x8f\xf1j\xb6h;\xbd\x9a\x15V\x82\xf6\x114B\x96\xa5s\x8d\xe2;u}\xaf\x1c\xdfi\x02\x0a\xa1\x1f\xc4\xbe\xf2\xff\xe4^\x9f\xff#\xc0\x00\xf6\x985Syty8\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x03@\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 
cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x02\xc6IDATx\xda\xe4\xd7Mh\x15W\x14\x07\xf0\xdf\x95DI\x89\x1f-H\x85\x88\x8b\x22\x14\x5c\x09BA(tU\x90\x16,-\x82 HBQ+\xe2B\x10\xa4\x81\x80d\xf5 \x11\x05\xb5\xa9\xd8Z\xa8T\xa8H#B\xa1tQ\x0a]H+\xae\x5cH\xa8\xc5/\x14k\xa8I\xfcH\xe3\xcc\xed\xe6&y\xf9\x987\xef\xf5\xa5f\xd1\x0b\x97\x99a\xce\xb9\xe7?\xe7k\xfe'\xc4\x18-\xe6Zb\x91\xd7\xa2\x03h\x99\xbc\x09!\xd4\xad\x14\xcfZ\x8duX\x8a\xbf1\x1c:\xdd\xac[\xbf*\xeca\xf2\xa1\x0c@<\xebml\xc7\x87\xe8\x98Gd\x18\xdf\xe3|\xe84Xp\xc6`\xe8\xb4\xb5!\x00\xf1K\x1bP\xc1\xd6\x06<{\x0d'C\x97\x81\xaas\x22\x84\xaei\x9b\xa5\x00\xe2\x19\xdb\xf1\x05\xda\xfem\x8c\xc3\xc7B<#\xcex\xae\x02\xd0R\xe8\xf2\xd3>\x95\xab4\x9bd\xf1\xb4(\xaf#\x09g(\x0d\xd8\xb6\x10\xc6\x1b\xaa\x82)\xe3\xa7\xac\x97\xfb\xea\xa5\x97\xe1\xd4\xca\xf4\xce\x8ayDx)\x00\xe2q\x1b\xe5v\xcc\xce\xa3\x82\x92\xfb\x0d7\xb1\x1ao\x15\x94f\x83\x1e\xc8t\xd5\xa1s\x11\x9f\x84\x03\x1eN\x01?\xaa\x0d\xdf\xe1\xddf\x01l)\x91\xbf\x1c\x0e\xfahN\xa9\x1d\xf0,\x1e\xb1\xb9)\x0f\xc4>\xed2o\xd6\x90}\x86]\xf3VM\x9f(k6\x072\xebJd\x7f\x0a\xdd\xee\xcf1^1\xd8\x80\xf1K\xb5\x00\xb4\x97(\xdf\x9d\xb7\xd3u7\xd4\xa2kz`\xacD\xb6\xe3\xbf-\xc3\xdc\xad\x12\xd9w\xe2akB\xef\xdc0\x14\xb6\xe1\xc3V\xcc\xea#\x11O\x99\x0e\xda4\x1f\xe85\x16{\x0ca}\xc1ym\xf8\x0c\x1f\x14\x1a\xec\xf1\x06^\xc78&d\xb2d,O\xc6c\xea\x1d\xf3\xff\x0dc\xb7\xe3\xd8_\xf2a\xdfb_\xa8T\xf5\x81n\xadx/\x81\x7f\x88\x91\xf4\xa5\xe3\x89\xb0\xbcH{4T\x0c\x15\xfe\x8e\xe3!\x9b\xf0k\x1d\xde\x1d\xc6\x95\xaaN\xb8\x16\xa3\xf8\x1d\xf7\xd2\xfbI\x10\xcf\x13\x88\x09\x5c\x0f}\x1e\xd4\xe4\x03\xf1\xa0o\x12\xf3id=\xc2-\xdcI\x00\xfe\xc4_\x18\xab\xf2\xc4#\xfc\x18\x8e\x88\xb5\xf9@\xae'\xb1\x9fFHH+\x96cU26y\xf6R,K ~\x09\xfdb)+\x0e\xfd\x86dvM\xa5O}\xbbEf\x99L\xbb\xcc\xaa\xaa\xfd\xaa\xccJ\x99\x1b\xa1\xdf\xed\xbaiy8\xe6\x9c\x5c\x8f<\xe5o\xf9^\x22\xd7*\xd7&\xf7\x8a\xdcr\xb9\x15\xe9\xfa\x87\xdc\xcf\x85\x94\xad&'\xdcgg*\xbd\xb2p\x8c\xa7x\x8f\xa4\xd8?NI\xf9u8\xe1|S\xb4<\xee\xb5\x01}x\xbf\x06\x80\x09<IFGp\x15G\xc3)W\x17n.\xd8Ss.x\x81\xfb\xb8\x8c\x0ba\xc0\x0f\x0b>\x98\xcc8`\xb7\x0e\xac\xc1k\xa9\xe6G\xc3\xe7n45\x19\xfdo\x87\xd3\x7f\x06\x00\x18P5\xa0\x01\x90u\xcd\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x09C\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x08\xc9IDATx\xda\xc4\x97{pT\xd5\x1d\xc7?\xe7\xee\xbdww\x93\xcdf\x13\x02\x11!Y\x82Aj\xaa\xe5%/\x07B\xa0Z\xa5\x88\x83\x1d\x15-\xb5>\x86\xaa\xd3\xa1\xd5j\xb5v\x1c\x91\xa9\xb5V\xa7R\xa6Zk;\xb4\x16\xac3\x96q\xea\x03AG^\x86\x00\x01\x1f\x88V\x9e\x22\x1bH 
\xef\xc7\xbe\xf7\xbe\xce\xe9\x1f\xd9`\xa0T\xd2\xe9\x1f\xbd3g\xee\x99\xb9\xe7\x9e\xf3\xf9}\x7f\xbf\xf3;\xbf#\x94R\xfc?\x1f\xf1U\xdf\x1e]\xbeb\xba\xe9\x0f<(\x04\xf3\x95RA5\xe8'!D\x16\xc4\xc6t:\xf9\xd0\x93O\xfc2\x06\xfc\xd7\x96(\xa5\xce\x0d\xf0\xfd\xdb\xee\x88\x8c\xbbx|Cqq\xd1\xa5\xb3g\xcd\xa2zl\x15\xa1\xa2\xa23\xc6\xa4\x92I\x8e\x1e\x8b\xd1\xb0c\x07\xf1x\xf2\xb3\xcf\x8f\x1c\x9e\xbdv\xcd\x8b}\xff+\x80x\xe4\xd1\xe5\x0bL\xc3\xbf\xfe\xba\x85\x0b\x988q\xc2\x90&\xda\xb7\xef\x13\xde\x5c\xbf\x01\xdb\xb1\x16>\xf1\xf8/6\x0cU\x8d\xb3\x01\xc4\xc3?\x7f\xe4\xbaH\xa4\xe4\xf5{\xee^Jqq1\x00[?m\xe2\xf5=G\xd9\xf3y+\x9a\xe8\x1f.\x95b\xfa\xb8\x91,\x9aV\xcd\xbc\x09c\x00\x88\xc7\xe3\xbc\xf0\xc7\xd5\xf4\xf5\xf5.\xfa\xf5\x93O\xbc9\x14\x883\x00\xbe\xbd\xe0\xda\xb2\xda9u\x9dw-\xbd\x83\x92\x92R\x12\xa9,\xcbVo\xe5D\xc2\xa3\xa4l\x14\xe1\xe22tC\xe0\xf3\x094\xa1\x91\x8ew\xd1\xde\xda\xcc\xc8\x02\xc1\xb3K\xe7\x11\x0e\x05\xe9\xed\xed\xe1O\xab_d{\xfd{\xc37nx\xabk(\x00z\xbeoL\xb9|j\xfd\xdc\xbaZJJJ\xe9K\xa6\xb9q\xe5\xdb\x04\x22\x15\xd4M\xabd\xee\xb8\x22\xaa\x86\xf9\x89[\x1e)[\x92\xb2%M\xbd\x05\x1c\x8f^\xc0\xf1\xa6fn\xf8\xcd\x06\xd6=0\x9f\x92\x92R\xe6\xd6\xd5\x92N\xa7\xeb7nxk\x22\xe0\x9c\x0fB\x03\xc4-Kn\x1d\x1f\x89Dj\xa6M\x9d\x0aH\x1e\xf8\xdbn\x82%\xa3\xf9\xf155,\x9b=\x82\x0b\x8b\x0d:\xd2\x0e=\x19\x97\xbe\x9cK\xc2\xf6\x08\xfa\x05\xc3C\x06\xd1\xb1QFW\x8f\xe3\xa7kw\x03\x92iS\xa7\x12\x89DjnYr\xeb\xf8\xf3\xec\xb2\xd3\x00ze4\xfa\xe4\xcc\x19\xd3Azl\xfe\xe4\x04\xa7\xd2\x82\xc9\xe3*\x994\xba\x80\xae\xb4CO\xce%\xe5H2\x9e\x22+!'!'\x159\xa50MAt\xechz\x94\xc1\xa6}'@z\xcc\x9c1\x9d\xcah\xf4I8\xad\xf0W\x02\x04t\x9f\xef\xcahE\x05H\x97M\x07:)*\x1eN\xdd\xc5az\xb3\xfd\xd6\xe6\x5cE\xceSX\x0al\x05\xb6\x10XB u\x0d\xd3\xd40\x0d\x8d1c+\xd8\xb4\xbf\x13<\x97hE\x05\xba\xcfw%\x10\x18\x0a\x80_*\x15\x18Q^\x8e\xf4<\xf6w\xe4\x08\x04\x0b\xa9*\xf5\x13\xb7\x5c2\xae\x22\xeb)r\x92~\x80\xfc\xe2\x96\xd0\x08\xfa}\x98\xa6\x0f\xc3\xd0(\xbf\xa0\x94\xfd\x1dY\xa4\xe72\xa2\xbc\x1c\xa9T\x00\xf0\x9f\x0f@\x074%%\x9e\xe7\x82\xe7\xe1\xf7\x07\xf0\xf9\x04\x09\xcb#m\xf7/n\xa9\xfc\xe2\xf4\x038\x02\x02\x01\xad\x7fKJ\x85\xf4$\xae'0\xcd\x00\x9e\xf4\x90\x9e\x8b\x92r\xc0\xc0\xf3\x02\xa0\x94BJ\x09\xd2\xc30Lt\x9f a\xb9\xa4\x1c\x8f\x9c\xfcR\xf6~\xeb\xc1454\x9f@\x08\x81\xf4\x14\xae+@S\x18\x86\x81\xf4<\xa4\x90\x0c\xf5\x8c9\x1d$RJ<\xc7A\xd3\x0dP\x92T\xce%\x9d\x07p\x84 \x078\x9a 
\xe0\xd7\xd1u\x81\xae\x0b\x84\xa6\xe1\xb8\x12\xf0\x01\x12\xcd\x0c\xe0I\x09\xff\xc5\x01\xa7\x01B*e\xb5\xb4\x9c\xc4q=\xc6\x97\xf9Q\xd2\xe5hW\x86T\xce!\x95\xb5Hd-\x92Y\x1b<\x07\xe99\x08\x5c\x8c<\x84\xa1\x09\x84\x0f\xfa\xe2\x165\xe5A<\xa98u\xaa\x15\x05\xd6P\x01d&\x95\xday\xac)\x86\xedzL\x1e\xd1\xef\x8e\xa3\xedqZ{\x92\xb4'2\xf4\xa6\xb2xv\x0e\xc7\xb2\xbel\x8e\x8d\xe7:\xc8|:\xed\xe8\xb2\x98Ya\xe2I8\xd6\xd4D:\x95\xdc\x09\xc8\xa1\xb8\xc09x\xf0\xc0\x9aaee\xf3.\xad\xa9\xa1&l\xb1\xdd\xaf\xd1\x93\xb1q\xec\x1c~\xbf\x86\xe9\xf7\x81\xab!]\x0d\xe5\x1aH\xcf@\xba.>\xc3Dh:m].!\x01S\xca\x14Rh\xec\xdc\xd9\xc8{\xdb\xb6=\xc5\x10T\xd0\x01k\xeb\x96\xcd\xbb/\xa9\xacj\xd9\xb7e\xdb\xe8\x8ah%\x0b\x0b,6t\x07\xb1\xbc\x1cqeq\xf5\xc7\x9b\xb8\xb0\xb2\x92&\x7f\x88\x83\xa3\xaa\x08VT\xe2:\x0e\x86\xdf\xa3/i\xd0\xdb'\xf8a\x8d\x8d\x8b\xc1\xfb\x8d\xbb\xd1u\x9f\xdc\xff\xd9?\x8f\x00\xb9!)\x00$\x0em\xd9\xf4\xce\x0f\xf4\xe0R\xe7\x83O\x09x\x92%\xa6I\xbbT\xbcPT\x8d\xea\xe8&b\xb9\x5c>,BU6\xce+\xc7\x8f\x12\x9c\x5cG\xd6\xd2\x08\x9b\x92;\xc7d(\xd4\x03\x9cl\xef`\xd3\x96\xad\xdc\xbc\xf8&-\x14z.v\xff}\xcb\xfcC\x89\x01\x80L}\xf3\xf15/}\xfc\xc1!\xff\x896\x9c#1\xdc\xd8I\xc2\x9a\xa2\xda\x1f'd\xdb\x88\xee>\xe2\x9f\x1dbD\xac\x85\xbbsI\xd2\xef72\xb5\xa0\x9b\x1b\xcbO\x11\xd4\x15\xad\xed\x1d\xbc\xb4\xf6e.\x9f4\x99I\x13'2\xbbv\x0eO\xadZm\x01\xe6P\x00\xbc\x22]\xef\x9aT]m\xf9\x8a\x8b\xc0u\xd1\xa4\x87\x97\xce\x10-\xf6q\xb0\xb2\x8c\x93~A\xb8\xa4\x84TW7\xe1\x8e\x1e\xbe\x93<\xc6\xf0\xdc)\x5c||\xb0w/k\xd6\xbeLu\xf5E\x14\x15\x17\x91\xced\xa9\x1e7\x9e\xc5\xb5\x1a\xbb\x9e\x99\xf9\x95\x10\x020\x00s\xc1\xd8\xea\xc5\xcf\xdf\xf0\xbd?\xf7\x1c\xfe\x02\xcfv(\x09\x15\xd2m\xfaX\x1f\x1d\xc1\xa4\xa9S\xe8hm%\xbcm\x173\xf4\x109\xc7&4z$/\x8a,\xdd\x9eD*dtLT+--\xc5\x1f,\xc0g\x14r\xcd\x9c\x09D\xcc\x168\xfc0;\x1a`\xf6\xfd\xdb\xfc\xf4'\xd33\xea\x81\x01\x05\xc4\xd8\x92a\xd3\xf4d\x06\x9fRd\x84\xcc\xbe\xdat\xe4\xa8\xcaf\xd9\xb5c\xe7\xe17\xdex\xcbn|\xff#^\x8dw\xd3\xebZ\x98\xbaNPB\xcb\xdeO:\xd6\xad{\xe5'\xcf=\xbbjI\xeb\xa9V\xd2\xd9\x1c\xa9\xac\x22j7\x12\xa9\x9f\x05\xb6\x80\xf1\xab\x985[\xa3a\xe5\xdcs*1\x00\xa0<\xcb\xd2\x85eS\x10*\xa2\xdb\xb3\x13\x8f~\xb4s\xc5\xbd\x0d\xef\xfe\xa5~G\xfd\xaau/\xad\xb9\xb7g\xcb\xd6\x95\xaf\xbd\xfb\xf6\xbd\xb1t\xb2\xd7_\x18B\xf3\x14\x99\xb6\xb6\xbdmmm\x1f\x02_\xac|\xe6\xe9\xab\x9b\x9aZ(\xcf40\xc3\xfa+\xb8~\xa8\xbf\x1dl\x07\xc6\xaf\xfc\x8f\x10Z\xbevSB\x81.\x04\x9a\xae3nLU\xe8\xc2\x82B\xb9\xbb\xb3\xfd\xf7\xae\x94{\x03\x86\xee<t\xd5\xfce+\xea\xaeZ0ft\x85\x8e\xa6\xa1II\x10R@+p\x12hZ\xf9\xf4\xe3\xf3D\xd7A\x8e\xf4\x85\xc8e\x15\xb8\x02\xeao\x1b\x04\xe1\xa3a\xe5<k\xe3\xaf\xe6\xcc\x1c(V\x06\x00ds\x22~\xc0\x15\x0a\xdd0\x18\x1e.)\xbc\xe7\x8a\xda{.+\x0a\x17\x03Y\xe9\xc9\xdea\xc2\xf0=8\x7f\xd1\xb7F\x86\x8b\x8b\x0c\xc3\xc0Q\x92\xe3\xc9\xc4\x81|\xb2\xc9\x01\x1d@l\xd1c\x9b\xe7|\x9a\xa8\xa6#\x91#\x9b\xf5\xc0%\x0f\xe1\xc2%\xab\x98U\xe7gX\xd8\xdc5pT\x9f\x06hh;\xf9\xee\xa1\xee\xce\x96\x92\xa2\x10n:\xcd\x1d\xd3kkW\xdf|\xfb\xb3?\x9b<}\xb1\xa9i\xa6\xe78\xae\xd7\xd5\x8b\xcfv(\x0c\x06il\x8e\xc5v\xb4\x9f\xda\x0cx\xf9\x96\x03\xda\x81\xd8M\xcb\xd7\xd7\xee\xee\x9bHg\x22{&\x84\xa3 \x9d\xc3\xf3\x00(8\x03\xa0\xd7\xb1\x8f\xfe\xf6\xa3\xc6\xe7s\xaeK\xa40\x84\x88'\xb9\xac\xf2\xa2\xaf_U3\xe1G\xa6&\x22\xc1`\x90PA\x01\xe1\xc20\xa1\xc2B\xf6t\xb5}hy^s>\x91\xc9\xfc<\xf6\x80\x12\x8b\x97\xbfQ\xbb\xbbo\x02\xb1>\x9d\x5c\xce\xebG\xdc~\x1b\xa8Bd\x7f\xe8\xe9\x83\x83P\x02\xde\xdb\xcd\xb1?\xdc\xf7\xce?~\x97\xc8f\xec\xe1\xe1\x08AWQ 
\x95#\xc0NZ\xb6\xd7\x93\xb3Ie\x12tZ\x16w]1\xe7\xfa9\xa3*\xbe\x99\x07p\x07AX_B\xbcY\xbb?1\x96\xcex\x06'g\x83kC\xa6\x1d!\xb4\x7f\xbb\x1b\x8a<\x8c\xa6\x0b\x11\x9cXV~\xf3\x9d\xdf\x98\xfc\xdd+/\xbelJsW\x9b{\xfdk/_[\x15\x8e|\xcd\xef\xf3\x0d\x97J\xb9(T\xa1i\x9a'\xd2\xc9=\xb1D\xbcq\x90\x1b\xe4\xa0\x0b\x89\x09\x8c\x00\xaa\xfe\xbeb\xe1v%@\xa0\xd0\x90\xdc\xf8\xd8\xc6\x19\xc0\x11\xa5T\xaf8+)i\xf9\xe6+\xf3\xfbK\x0d\xcd7A\xd3\x84\xffd:\xbd#o\x99:G\x1b\xbc\xb0:\xebF4\x001\x0a\x08\x0d\x5c+\xf3\xbb\xa6C)e\x8bsdF1\x08f\xb0B\x03\x13\x0f~\x9f\xdd?W)d\x02\x85\xf9\x8cK\xdeei\xc0VJ\xf1\xaf\x01\x00P#f\xf7\x8bt\x91\xd4\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x047\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x12\x00\x00\x00\x12\x08\x03\x00\x00\x00a\x10~e\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x03diTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?> <x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.0-c060 61.134777, 2010/02/12-17:32:00 \x22> <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22> <rdf:Description rdf:about=\x22\x22 xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22 xmlns:stRef=\x22http://ns.adobe.com/xap/1.0/sType/ResourceRef#\x22 xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22 xmpMM:OriginalDocumentID=\x22xmp.did:ED2AD41810E6E011A9CCE63BC6BE0A19\x22 xmpMM:DocumentID=\x22xmp.did:8377A7CAE61811E0AD0CB4C1CFE7C6ED\x22 xmpMM:InstanceID=\x22xmp.iid:8377A7C9E61811E0AD0CB4C1CFE7C6ED\x22 xmp:CreatorTool=\x22Adobe Photoshop CS5 Windows\x22> <xmpMM:DerivedFrom stRef:instanceID=\x22xmp.iid:ED2AD41810E6E011A9CCE63BC6BE0A19\x22 stRef:documentID=\x22xmp.did:ED2AD41810E6E011A9CCE63BC6BE0A19\x22/> </rdf:Description> </rdf:RDF> </x:xmpmeta> <?xpacket end=\x22r\x22?>\x0cV\xf1\xc3\x00\x00\x00\x09PLTE\x00\x00\x00\xaa\x00\x00\xff\xff\xff;Z\xfdZ\x00\x00\x00\x03tRNS\xff\xff\x00\xd7\xca\x0dA\x00\x00\x00EIDATx\xdab`\xc2\x00\x0c\x10\x12\x06\x90\x84\x90\x150\x80\x94\xa0\xeaa\x80\xe8\xc2\x22\xc4\x80]\x15##\x5c\x1c\xa6\x84\x91\x11\x22\x88d##T\x0cM\x08C#\x92Y\xa48\x02\xbf\x87\xb0x\x1b{\xe0\xa0\x00\x80\x00\x03\x00N\xe8\x01\xf6%\xa8\x8dp\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x04!\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x12\x00\x00\x00\x12\x08\x03\x00\x00\x00a\x10~e\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x03diTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?> <x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.0-c060 61.134777, 2010/02/12-17:32:00 \x22> <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22> <rdf:Description rdf:about=\x22\x22 xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22 xmlns:stRef=\x22http://ns.adobe.com/xap/1.0/sType/ResourceRef#\x22 xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22 xmpMM:OriginalDocumentID=\x22xmp.did:F32AD41810E6E011A9CCE63BC6BE0A19\x22 xmpMM:DocumentID=\x22xmp.did:B90F01CDE61811E0868CC1233511EB8D\x22 xmpMM:InstanceID=\x22xmp.iid:B90F01CCE61811E0868CC1233511EB8D\x22 xmp:CreatorTool=\x22Adobe Photoshop CS5 Windows\x22> <xmpMM:DerivedFrom stRef:instanceID=\x22xmp.iid:F32AD41810E6E011A9CCE63BC6BE0A19\x22 stRef:documentID=\x22xmp.did:F32AD41810E6E011A9CCE63BC6BE0A19\x22/> </rdf:Description> </rdf:RDF> </x:xmpmeta> <?xpacket 
end=\x22r\x22?>\xd2l\xe3L\x00\x00\x00\x06PLTErrr\xff\xff\xff\xa1\x17\xe8\xaf\x00\x00\x00\x02tRNS\xff\x00\xe5\xb70J\x00\x00\x003IDATx\xdab`\xc4\x00\x0c\x10\x12\x06\x90\x84\x90\x150\x80\x94@\xf9\x0cpq\xb8\x06\x84F\x98!p\x1a\x87*\x0c\xb3\xb0\xd88h\x1c\x81\x198(\x00 \xc0\x00\x9a\xbb\x00\xe7\x16\xec\xf7'\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x03\x09\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00 \x00\x00\x00 \x08\x02\x00\x00\x00\xfc\x18\xed\xa3\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x02\x8fIDATx\xda\xb4V=o\x13A\x10}o\xf7\xceg\x13,\x0a>\x02\x0d-\x05\xa1\xa7D\x8aB\x81\x82\xd2\xe7\xbfP\xf1kR'\x22H\x11\x08*\xa8\xa3P\xd0\xa6\x80DA\x82\x10\xc0\xb1ow\x1f\xc5\xda\xebK\x9c;\xcb\x893\xd5\xcc\x8d\xf7=\xef\xec\x9b\xd9\xe5\xf1\xf1\xb11\x06\x00\x80\xcd\xad-\x8cl\xb9s\x03\x13\xf6\xd5\xdc\x04\xf0(\xfc\x99L\xbd\xeb\xfdK\xfe\xcb\xd5\xd5\xe4\x9b\x84~i\x0b\xde7d\x0d\x80<\xcfg\x05\xb5\xc6\x02\xc8\xac\x05`\xac\x9dBP\x96\xe5\xac\x04>x\x00\xae\xf1\xbfG\xe3\xfa\xfaz\x0a666\x16\xda\xed\xe8?\xeb,L\xfe\xfa\xf3\xe2c\x00O\x0f\xbfL\xa6>\xf4\xfe&\x7fum\xed\xcc\x0e\x92eY\x86y\xd8\xc9\xc9\x09\x80\x10\xc2y\x82\xbe4\x17\x82n\xb7\x1b\xf5s\x9e`\xa9hc\xde\x96\xbd\xbes?\x05o*\xfe\x8b\x85\xae\x17\x08\x01\x0a\xa0\x11D|\xbc}\x0f\xc0r\xf8\xe9\x0c\x01\xcaZ:'\xc2d\xf9\xdb\xdf\xbf\xc6k+8\xb5E\x1f@I\x22\x82H\xa4\xf2\xf5!\x07\x12\x82w>\xcfE\x14e\xad\x9cj\x09(P\x12\xc0\x18B\xa2\x19\xa5D\x81\x90@\x03I\x10B=\x81\x0f\xb0\x174\xb30F\x07\x01\xc1$\x09\x10\x00D\xcaZS:\x01\xa8\x97\x9f\xb9\x10}\x082.\x8b\x00\x88)%\x112\xc68\x0f\x08\x14\xbdk\xea\xe4\xba\x22\x0d\xc1\x181*\xc3\x87F\xd60\x04Ex\xb0A\xdd\xb5[\xeb\xf8\x81\x8f5\x91D\x1a!\x8cp:r\xc5\xc0!\x08 \x81@4\x0c\xa3lgq)\x05\xdb\x0f\xf6\x93\xdf\xcejW\xed\xdc]\x9a\xfc\xb8\xed\xc6B\xca+\x98\x06\xd7l\xd9\xca\xe1^\x0a\xca\xef\xbb\xc9\x7f^\xb4&K\xf4~\xf1\x09\x80\x95\xa3\xbd \x9c+\xd1\xe9`<\x92W\x0e\x1fN?\x83\x9em\xb9\x91\x884\x14\xd4\xf0\xa0{\xcc\xca<3>\xc4\xbc\xe2\x19\x0c\xcaYU\x14W#\xf6TU(F\x81>\xc8\x18\x8eT\xc6\x86\x0b\xc7\x14E-\xbc\xc0jW\x8c\x9b\x82\x14\x18B\xc8l\xa4\x90\xad\xadD\x16\xfa\xfd\x9a. S3+\xca\x9f\xa8\x94\x8c\x12\x9c\x0fy&\xc087s\x1f\xc4\xfeJ3c8.0\x1a\x12\x1cn+\x80\x22X_\xeaZ\x82\x16\xe8G\xc3\x22\x8d\xebh\x05h\xe3\xd0\xb3\xb6U\x96q\x5c\xd7\x12\xbc\xfaq\x90\x82\xdd\x1f\x07\xfba\xd8/;\x17\xde\xc9\xb8\x05`\xf3\xe8[\xf3\x9d\xfc\xa9\x82yfk{\xfd\xd3\xb97\xda\x19\x82\x82\xbc\xdeK\xdf\xd5\x8b\xe1\xd2\x97>#\xdb5\xbeM\xaf\xfet\x9c~\x06W|:N\x7f\xfc^U'\x8d\xfb\xf8?\x00C\xaf1r\x91V\xb4\xa9\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00\x05\x00o\xa6S\x00i\x00c\x00o\x00n\x00s\x00\x08\x0f\x07Z\xc7\x00e\x00x\x00i\x00t\x00.\x00p\x00n\x00g\x00\x08\x06|Z\x07\x00c\x00o\x00p\x00y\x00.\x00p\x00n\x00g\x00\x15\x07=mg\x00l\x00o\x00c\x00k\x00_\x00t\x00o\x00_\x00s\x00e\x00l\x00e\x00c\x00t\x00i\x00o\x00n\x00.\x00p\x00n\x00g\x00\x0a\x0a\xc8\xfb\x07\x00f\x00o\x00l\x00d\x00e\x00r\x00.\x00p\x00n\x00g\x00\x09\x07\xc4\x83\x87\x00c\x00o\x00p\x00y\x001\x00.\x00p\x00n\x00g\x00\x09\x0a\xa8\xbaG\x00p\x00a\x00s\x00t\x00e\x00.\x00p\x00n\x00g\x00\x0c\x0a\xdc2G\x00c\x00o\x00l\x00l\x00a\x00p\x00s\x00e\x00.\x00p\x00n\x00g\x00\x09\x07\xc5\x83\x87\x00c\x00o\x00p\x00y\x002\x00.\x00p\x00n\x00g\x00\x0d\x02\x0b\xb9g\x00c\x00o\x00p\x00y\x00_\x00m\x00o\x00v\x00e\x00.\x00p\x00n\x00g\x00\x0c\x02\xde\x06G\x00r\x00e\x00t\x00a\x00r\x00g\x00e\x00t\x00.\x00p\x00n\x00g\x00\x0b\x0cj,G\x00r\x00e\x00f\x00r\x00e\x00s\x00h\x00.\x00p\x00n\x00g\x00\x0b\x07i\xd9\x07\x00r\x00e\x00p\x00l\x00a\x00c\x00e\x00.\x00p\x00n\x00g\x00\x11\x09z\xac\x87\x00f\x00i\x00l\x00t\x00e\x00r\x00_\x00a\x00c\x00t\x00i\x00v\x00e\x00.\x00p\x00n\x00g\x00\x13\x0d\xd8\xcb\x07\x00f\x00i\x00l\x00t\x00e\x00r\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00.\x00p\x00n\x00g\x00\x0c\x07o\x9c\xa7\x00a\x00p\x00p\x00_\x00i\x00c\x00o\x00n\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x02\x00\x00\x00\xec\x00\x00\x00\x00\x00\x01\x00\x00#\xb5\x00\x00\x01\x0c\x00\x00\x00\x00\x00\x01\x00\x00)\x15\x00\x00\x00&\x00\x00\x00\x00\x00\x01\x00\x00\x03\xc7\x00\x00\x00<\x00\x00\x00\x00\x00\x01\x00\x00\x07\xe6\x00\x00\x01F\x00\x00\x00\x00\x00\x01\x00\x004[\x00\x00\x01\xb6\x00\x00\x00\x00\x00\x01\x00\x00F\x02\x00\x00\x00\x86\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x89\x00\x00\x00\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xa2\x00\x00\x01b\x00\x00\x00\x00\x00\x01\x00\x00=\xa2\x00\x00\x00\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x14\x96\x00\x00\x00l\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x0d\x00\x00\x00\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x1a\x9a\x00\x00\x01*\x00\x00\x00\x00\x00\x01\x00\x001\x17\x00\x00\x01\x8a\x00\x00\x00\x00\x00\x01\x00\x00A\xdd\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00"
def qInitResources():
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

def qCleanupResources():
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

qInitResources()
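For orientation: this pyrcc-generated module registers the embedded PNG data with Qt's resource system the moment it is imported (the trailing qInitResources() call above). Consumers never touch qt_resource_data directly; they refer to the aliases recorded in qt_resource_name through ":/icons/..." paths. A minimal usage sketch, assuming a PyQt4 binding and that the module is saved as icons_rc.py (both the binding and the module name are assumptions, not stated by the source):

import sys
from PyQt4 import QtGui

import icons_rc  # hypothetical module name; importing it runs qInitResources()

app = QtGui.QApplication(sys.argv)
button = QtGui.QPushButton("Quit")
# "icons" and "exit.png" are taken verbatim from qt_resource_name above.
button.setIcon(QtGui.QIcon(":/icons/exit.png"))
button.show()
sys.exit(app.exec_())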
| 2,466.136364 | 51,556 | 0.745185 | 12,185 | 54,255 | 3.310628 | 0.200739 | 0.060387 | 0.040382 | 0.021418 | 0.132573 | 0.126351 | 0.120749 | 0.113138 | 0.103198 | 0.093034 | 0 | 0.262602 | 0.004737 | 54,255 | 21 | 51,557 | 2,583.571429 | 0.484462 | 0.003373 | 0 | 0 | 0 | 0.333333 | 0.993304 | 0.971404 | 0 | 0 | 0.000148 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
16a096da2a7f4a69888015d4e9649fc292fdc776 | 122 | py | Python | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_agc.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | ["Zlib"] | 82 | 2016-06-29T17:24:43.000Z | 2021-04-16T06:49:17.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_agc.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | ["Zlib"] | 6 | 2022-01-12T18:22:08.000Z | 2022-03-25T10:19:27.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_agc.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | ["Zlib"] | 56 | 2016-08-02T10:50:50.000Z | 2021-07-19T08:57:34.000Z |
from pyradioconfig.parts.jumbo.calculators.calc_agc import CALC_AGC_jumbo
class CALC_AGC_nixi(CALC_AGC_jumbo):
    # No Nixi-specific AGC overrides: every calculation is inherited from the
    # Jumbo implementation unchanged.
    pass
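The empty subclass above is the configurator's reuse idiom: a newer radio part inherits an earlier part's calculator wholesale and only overrides methods when its hardware differs. A minimal sketch of the same idiom, with hypothetical names rather than the real pyradioconfig API:

class CalcAGCJumbo(object):
    # Stand-in for an inherited AGC calculation; the real method names differ.
    def calc_agc_period(self, dec0):
        return 2 * dec0

class CalcAGCNixi(CalcAGCJumbo):
    pass  # inherits calc_agc_period unchanged, just as CALC_AGC_nixi does

print(CalcAGCNixi().calc_agc_period(21))  # prints 42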
| 20.333333 | 73 | 0.836066 | 19 | 122 | 5 | 0.578947 | 0.294737 | 0.252632 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106557 | 122 | 6 | 74 | 20.333333 | 0.87156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
16a90e67ac3d58694823e8776ed4d8cbd014722d | 11,036 | py | Python | armcompiler/app/parser/test_ArmYacc.py | SartMorgs/arm-compiler | 7ad399add76a9bf6fd88b4395208a9e46a622ecf | ["MIT"] | null | null | null | armcompiler/app/parser/test_ArmYacc.py | SartMorgs/arm-compiler | 7ad399add76a9bf6fd88b4395208a9e46a622ecf | ["MIT"] | 4 | 2021-06-08T02:07:01.000Z | 2021-08-13T06:23:35.000Z | armcompiler/app/parser/test_ArmYacc.py | SartMorgs/arm-compiler | 7ad399add76a9bf6fd88b4395208a9e46a622ecf | ["MIT"] | null | null | null |
import unittest
import armcompiler.app.parser.ArmYacc as ps
class TestArmSyntaticPatternParser(unittest.TestCase):
def test_parsing_sample_load_sub(self):
code = '''
addr1 EQU 0x10
AREA main, CODE, READONLY
main PROC
LDR R0, 0x35
LDR R1, 0x12
ADDS R2, R0, R1 ; teste tesssste
SUBS R3, R0, R1
BL func1
B main
func1 PROC
LDR R1, 0x12
ADDS R2, R0, R1 ; teste tesssste
ENDP
INT0_Handler PROC
SUBS R3, R0, R1
SUBS R3, R0, R1
ENDP
END'''
want = (
('addr1', 'EQU', '0x10'),
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R0; 0x35;') | ('LDR', 'R1; 0x12;') | ('ADDS', 'R2; R0; R1;') | ('SUBS', 'R3; R0; R1;') | ('BL', 'func1') | ('B', 'main') | | | | |"),
('func1', 'PROC', "('LDR', 'R1; 0x12;') | ('ADDS', 'R2; R0; R1;') |", 'ENDP'),
('INT0_Handler', 'PROC', "('SUBS', 'R3; R0; R1;') | ('SUBS', 'R3; R0; R1;') |", 'ENDP'),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_code_with_comments(self):
code = '''
AREA main, CODE, READONLY
; sadsadasdasdasd
main PROC
;sadasdasdasd sadsadas
LDR R0, 0x35 ;asdsad
SUBS R4, R0 0x12 ; teste tesssste
END
;sfg,fdlçgfd
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R0; 0x35;') | ('SUBS', 'R4; R0; 0x12;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_arithmetic_adds_type_2(self):
code = '''
addr1 EQU 0x25
AREA main, CODE, READONLY
main PROC
LDR R0, 0x35
ADDS R2, R0 0x12 ; teste tesssste
END'''
want = (
('addr1', 'EQU', '0x25'),
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R0; 0x35;') | ('ADDS', 'R2; R0; 0x12;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_arithmetic_subs_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R0, 0x35
SUBS R4, R0 0x12 ; teste tesssste
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R0; 0x35;') | ('SUBS', 'R4; R0; 0x12;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_comparison_cmn_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, -0x11
LDR R4, -0x11
CMN R5, R4
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; -0x11;') | ('LDR', 'R4; -0x11;') | ('CMN', 'R5; R4;') | |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_comparison_cmn_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, -0x11
CMN R5, -0x11
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; -0x11;') | ('CMN', 'R5; -0x11;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_comparison_cmp_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, 0x15
LDR R4, 0x15
CMP R5, R4
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; 0x15;') | ('LDR', 'R4; 0x15;') | ('CMP', 'R5; R4;') | |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_comparison_cmp_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, 0x15
CMP R5, 0x15
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; 0x15;') | ('CMP', 'R5; 0x15;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_movs_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, 0x15
MOVS R4, R5
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; 0x15;') | ('MOVS', 'R4; R5;') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_movs_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', ('MOVS', 'R4; 0x15;')),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_beq_type1(self):
code = '''
addr1 EQU 0x10
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
BEQ R4
END
'''
want = (
('addr1', 'EQU', '0x10'),
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('BEQ', 'R4') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_beq_type_2(self):
code = '''
addr1 EQU 0x10
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
BEQ addr1
END
'''
want = (
('addr1', 'EQU', '0x10'),
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('BEQ', 'addr1') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_blt_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
BLT addr1
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('BLT', 'addr1') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nWanted value \n{want} \nis not equal to gotted value \n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_bl_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
BLT move_and_compare
move_and_compare PROC
LDR R4, 0x15
MOVS R5, R4
CMN R5, R4
ENDP
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('BLT', 'move_and_compare') |"),
('move_and_compare', 'PROC', "('LDR', 'R4; 0x15;') | ('MOVS', 'R5; R4;') | ('CMN', 'R5; R4;') | |", 'ENDP'),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_bx_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
BLT move_and_compare
BX R14
move_and_compare PROC
LDR R4, 0x15
MOVS R5, R4
CMN R5, R4
ENDP
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('BLT', 'move_and_compare') | ('BX', 'R14') | |"),
('move_and_compare', 'PROC', "('LDR', 'R4; 0x15;') | ('MOVS', 'R5; R4;') | ('CMN', 'R5; R4;') | |", 'ENDP'),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_nop(self):
code = '''
AREA main, CODE, READONLY
main PROC
NOP
NOP
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "NOP | NOP |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_bypass_b_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
MOVS R4, 0x15
B main
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('MOVS', 'R4; 0x15;') | ('B', 'main') |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_store_type_1(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, 0x15
LDR R4, 0x15
CMN R5, R4
STR R4, R1
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; 0x15;') | ('LDR', 'R4; 0x15;') | ('CMN', 'R5; R4;') | ('STR', 'R4; R1;') | | |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
def test_parsing_store_type_2(self):
code = '''
AREA main, CODE, READONLY
main PROC
LDR R5, 0x15
LDR R4, 0x15
CMN R5, R4
STR R4, 0x21
END
'''
want = (
('AREA', 'main', 'CODE', 'READONLY'),
('main', 'PROC', "('LDR', 'R5; 0x15;') | ('LDR', 'R4; 0x15;') | ('CMN', 'R5; R4;') | ('STR', 'R4; 0x21;') | | |"),
'END'
)
parser = ps.ArmSyntaticPatternParser()
parser.build()
got = parser.parsing(code)
error_message = f'\nExpected value\n{want}\nis not equal to actual value\n{got}'
self.assertEqual(want, got, error_message)
if __name__ == '__main__':
unittest.main()
| 21.810277
| 164
| 0.59315
| 1,456
| 11,036
| 4.39217
| 0.065934
| 0.047537
| 0.071306
| 0.118843
| 0.959812
| 0.959812
| 0.949492
| 0.943081
| 0.941204
| 0.925567
| 0
| 0.046025
| 0.22037
| 11,036
| 505
| 165
| 21.853465
| 0.697234
| 0
| 0
| 0.786082
| 0
| 0.023196
| 0.493929
| 0
| 0
| 0
| 0.026096
| 0
| 0.048969
| 1
| 0.048969
| false
| 0.020619
| 0.005155
| 0
| 0.056701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16b2d96a689d65614a449f704bcfe25251b7d98e
| 21,713
|
py
|
Python
|
octavia/tests/unit/controller/worker/v2/flows/test_load_balancer_flows.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 129
|
2015-06-23T08:06:23.000Z
|
2022-03-31T12:38:20.000Z
|
octavia/tests/unit/controller/worker/v2/flows/test_load_balancer_flows.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 6
|
2016-05-20T11:05:27.000Z
|
2021-03-23T06:05:52.000Z
|
octavia/tests/unit/controller/worker/v2/flows/test_load_balancer_flows.py
|
zhangi/octavia
|
e68c851fecf55e1b5ffe7d5b849f729626af28a3
|
[
"Apache-2.0"
] | 166
|
2015-07-15T16:24:05.000Z
|
2022-03-02T20:54:36.000Z
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from taskflow.patterns import linear_flow as flow
from octavia.common import constants
from octavia.common import exceptions
from octavia.controller.worker.v2.flows import flow_utils
from octavia.controller.worker.v2.flows import load_balancer_flows
import octavia.tests.unit.base as base
# NOTE: We patch the get_network_driver for all the calls so we don't
# inadvertently make real calls.
@mock.patch('octavia.common.utils.get_network_driver')
class TestLoadBalancerFlows(base.TestCase):
def setUp(self):
super().setUp()
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
self.conf.config(
group="controller_worker",
amphora_driver='amphora_haproxy_rest_driver')
self.conf.config(group="nova", enable_anti_affinity=False)
self.LBFlow = load_balancer_flows.LoadBalancerFlows()
def test_get_create_load_balancer_flow(self, mock_get_net_driver):
amp_flow = self.LBFlow.get_create_load_balancer_flow(
constants.TOPOLOGY_SINGLE)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
def test_get_create_active_standby_load_balancer_flow(
self, mock_get_net_driver):
amp_flow = self.LBFlow.get_create_load_balancer_flow(
constants.TOPOLOGY_ACTIVE_STANDBY)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
def test_get_create_anti_affinity_active_standby_load_balancer_flow(
self, mock_get_net_driver):
self.conf.config(group="nova", enable_anti_affinity=True)
self._LBFlow = load_balancer_flows.LoadBalancerFlows()
amp_flow = self._LBFlow.get_create_load_balancer_flow(
constants.TOPOLOGY_ACTIVE_STANDBY)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.SERVER_GROUP_ID, amp_flow.provides)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.conf.config(group="nova", enable_anti_affinity=False)
def test_get_create_bogus_topology_load_balancer_flow(
self, mock_get_net_driver):
self.assertRaises(exceptions.InvalidTopology,
self.LBFlow.get_create_load_balancer_flow,
'BOGUS')
def test_get_delete_load_balancer_flow(self, mock_get_net_driver):
lb_mock = mock.Mock()
listener_mock = mock.Mock()
listener_mock.id = '123'
lb_mock.listeners = [listener_mock]
lb_flow = self.LBFlow.get_delete_load_balancer_flow(lb_mock)
self.assertIsInstance(lb_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER, lb_flow.requires)
self.assertIn(constants.SERVER_GROUP_ID, lb_flow.requires)
self.assertIn(constants.PROJECT_ID, lb_flow.requires)
self.assertEqual(0, len(lb_flow.provides))
self.assertEqual(3, len(lb_flow.requires))
@mock.patch('octavia.db.repositories.LoadBalancerRepository.get')
@mock.patch('octavia.db.api.get_session', return_value=mock.MagicMock())
def test_get_delete_load_balancer_flow_cascade(self, mock_session,
mock_get_lb,
mock_get_net_driver):
lb_mock = mock.Mock()
listener_mock = mock.Mock()
listener_mock.id = '123'
listener_mock.to_dict.return_value = {'id': '123'}
lb_mock.listeners = [listener_mock]
lb_mock.id = '321'
lb_mock.project_id = '876'
pool_mock = mock.Mock()
pool_mock.id = '345'
pool_mock.to_dict.return_value = {constants.ID: pool_mock.id}
pool_mock.listeners = None
pool_mock.health_monitor = None
pool_mock.members = None
lb_mock.pools = [pool_mock]
l7_mock = mock.Mock()
l7_mock.id = '678'
listener_mock.l7policies = [l7_mock]
mock_get_lb.return_value = lb_mock
lb_dict = {constants.LOADBALANCER_ID: lb_mock.id}
listeners = flow_utils.get_listeners_on_lb(lb_mock)
pools = flow_utils.get_pools_on_lb(lb_mock)
lb_flow = self.LBFlow.get_cascade_delete_load_balancer_flow(
lb_dict, listeners, pools)
self.assertIsInstance(lb_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER, lb_flow.requires)
self.assertIn(constants.LOADBALANCER_ID, lb_flow.requires)
self.assertIn(constants.PROJECT_ID, lb_flow.requires)
self.assertIn(constants.SERVER_GROUP_ID, lb_flow.requires)
self.assertEqual(1, len(lb_flow.provides))
self.assertEqual(4, len(lb_flow.requires))
def test_get_update_load_balancer_flow(self, mock_get_net_driver):
lb_flow = self.LBFlow.get_update_load_balancer_flow()
self.assertIsInstance(lb_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER, lb_flow.requires)
self.assertIn(constants.UPDATE_DICT, lb_flow.requires)
self.assertEqual(0, len(lb_flow.provides))
self.assertEqual(3, len(lb_flow.requires))
def test_get_post_lb_amp_association_flow(self, mock_get_net_driver):
amp_flow = self.LBFlow.get_post_lb_amp_association_flow(
'123', constants.TOPOLOGY_SINGLE)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.UPDATE_DICT, amp_flow.requires)
self.assertIn(constants.LOADBALANCER, amp_flow.provides)
self.assertEqual(1, len(amp_flow.provides))
self.assertEqual(2, len(amp_flow.requires))
# Test Active/Standby path
amp_flow = self.LBFlow.get_post_lb_amp_association_flow(
'123', constants.TOPOLOGY_ACTIVE_STANDBY)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.UPDATE_DICT, amp_flow.requires)
self.assertIn(constants.AMPHORAE, amp_flow.provides)
self.assertIn(constants.AMP_VRRP_INT, amp_flow.provides)
self.assertIn(constants.AMPHORAE_NETWORK_CONFIG, amp_flow.provides)
self.assertIn(constants.LOADBALANCER, amp_flow.provides)
self.assertEqual(2, len(amp_flow.requires), amp_flow.requires)
self.assertEqual(4, len(amp_flow.provides), amp_flow.provides)
# Second Active/Standby pass (no mark_active argument is passed here).
amp_flow = self.LBFlow.get_post_lb_amp_association_flow(
'123', constants.TOPOLOGY_ACTIVE_STANDBY)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.UPDATE_DICT, amp_flow.requires)
self.assertIn(constants.AMPHORAE, amp_flow.provides)
self.assertIn(constants.AMPHORAE_NETWORK_CONFIG, amp_flow.provides)
self.assertIn(constants.AMP_VRRP_INT, amp_flow.provides)
self.assertIn(constants.LOADBALANCER, amp_flow.provides)
self.assertEqual(2, len(amp_flow.requires), amp_flow.requires)
self.assertEqual(4, len(amp_flow.provides), amp_flow.provides)
def test_get_create_load_balancer_flows_single_listeners(
self, mock_get_net_driver):
create_flow = (
self.LBFlow.get_create_load_balancer_flow(
constants.TOPOLOGY_SINGLE, True
)
)
self.assertIsInstance(create_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, create_flow.requires)
self.assertIn(constants.UPDATE_DICT, create_flow.requires)
self.assertIn(constants.LISTENERS, create_flow.provides)
self.assertIn(constants.AMPHORA, create_flow.provides)
self.assertIn(constants.AMPHORA_ID, create_flow.provides)
self.assertIn(constants.AMPHORA_NETWORK_CONFIG, create_flow.provides)
self.assertIn(constants.AMP_DATA, create_flow.provides)
self.assertIn(constants.COMPUTE_ID, create_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, create_flow.provides)
self.assertIn(constants.LOADBALANCER, create_flow.provides)
self.assertIn(constants.DELTAS, create_flow.provides)
self.assertIn(constants.ADDED_PORTS, create_flow.provides)
self.assertIn(constants.SERVER_PEM, create_flow.provides)
self.assertIn(constants.SUBNET, create_flow.provides)
self.assertIn(constants.VIP, create_flow.provides)
self.assertEqual(6, len(create_flow.requires))
self.assertEqual(13, len(create_flow.provides),
create_flow.provides)
def test_get_create_load_balancer_flows_active_standby_listeners(
self, mock_get_net_driver):
create_flow = (
self.LBFlow.get_create_load_balancer_flow(
constants.TOPOLOGY_ACTIVE_STANDBY, True
)
)
self.assertIsInstance(create_flow, flow.Flow)
self.assertIn(constants.AVAILABILITY_ZONE, create_flow.requires)
self.assertIn(constants.BUILD_TYPE_PRIORITY, create_flow.requires)
self.assertIn(constants.FLAVOR, create_flow.requires)
self.assertIn(constants.LOADBALANCER_ID, create_flow.requires)
self.assertIn(constants.SERVER_GROUP_ID, create_flow.requires)
self.assertIn(constants.UPDATE_DICT, create_flow.requires)
self.assertIn(constants.ADDED_PORTS, create_flow.provides)
self.assertIn(constants.AMP_DATA, create_flow.provides)
self.assertIn(constants.AMP_VRRP_INT, create_flow.provides)
self.assertIn(constants.AMPHORA, create_flow.provides)
self.assertIn(constants.AMPHORAE, create_flow.provides)
self.assertIn(constants.AMPHORA_ID, create_flow.provides)
self.assertIn(constants.AMPHORA_NETWORK_CONFIG, create_flow.provides)
self.assertIn(constants.AMPHORAE_NETWORK_CONFIG, create_flow.provides)
self.assertIn(constants.COMPUTE_ID, create_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, create_flow.provides)
self.assertIn(constants.DELTAS, create_flow.provides)
self.assertIn(constants.LOADBALANCER, create_flow.provides)
self.assertIn(constants.LISTENERS, create_flow.provides)
self.assertIn(constants.SERVER_PEM, create_flow.provides)
self.assertIn(constants.SUBNET, create_flow.provides)
self.assertIn(constants.VIP, create_flow.provides)
self.assertEqual(6, len(create_flow.requires), create_flow.requires)
self.assertEqual(16, len(create_flow.provides),
create_flow.provides)
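# Helper: build a failover flow for a SINGLE-topology load balancer and check its requires/provides sets.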
def _test_get_failover_LB_flow_single(self, amphorae):
lb_mock = mock.MagicMock()
lb_mock.id = uuidutils.generate_uuid()
lb_mock.topology = constants.TOPOLOGY_SINGLE
failover_flow = self.LBFlow.get_failover_LB_flow(amphorae, lb_mock)
self.assertIsInstance(failover_flow, flow.Flow)
self.assertIn(constants.AVAILABILITY_ZONE, failover_flow.requires)
self.assertIn(constants.BUILD_TYPE_PRIORITY, failover_flow.requires)
self.assertIn(constants.FLAVOR, failover_flow.requires)
self.assertIn(constants.LOADBALANCER, failover_flow.requires)
self.assertIn(constants.LOADBALANCER_ID, failover_flow.requires)
self.assertIn(constants.ADDED_PORTS, failover_flow.provides)
self.assertIn(constants.AMPHORA, failover_flow.provides)
self.assertIn(constants.AMPHORA_ID, failover_flow.provides)
self.assertIn(constants.AMPHORAE_NETWORK_CONFIG,
failover_flow.provides)
self.assertIn(constants.BASE_PORT, failover_flow.provides)
self.assertIn(constants.COMPUTE_ID, failover_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, failover_flow.provides)
self.assertIn(constants.DELTA, failover_flow.provides)
self.assertIn(constants.LOADBALANCER, failover_flow.provides)
self.assertIn(constants.SERVER_PEM, failover_flow.provides)
self.assertIn(constants.VIP, failover_flow.provides)
self.assertIn(constants.VIP_SG_ID, failover_flow.provides)
self.assertEqual(6, len(failover_flow.requires),
failover_flow.requires)
self.assertEqual(12, len(failover_flow.provides),
failover_flow.provides)
def test_get_failover_LB_flow_no_amps_single(self, mock_get_net_driver):
self._test_get_failover_LB_flow_single([])
def test_get_failover_LB_flow_one_amp_single(self, mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_STANDALONE,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: None, constants.VRRP_IP: None}
self._test_get_failover_LB_flow_single([amphora_dict])
def test_get_failover_LB_flow_one_bogus_amp_single(self,
mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: 'bogus',
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: None, constants.VRRP_IP: None}
self._test_get_failover_LB_flow_single([amphora_dict])
def test_get_failover_LB_flow_two_amp_single(self, mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid()}
amphora2_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_STANDALONE,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: None, constants.VRRP_IP: None}
self._test_get_failover_LB_flow_single([amphora_dict, amphora2_dict])
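# Helper: the same requires/provides checks for a failover flow on an ACTIVE_STANDBY-topology load balancer.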
def _test_get_failover_LB_flow_no_amps_act_stdby(self, amphorae):
lb_mock = mock.MagicMock()
lb_mock.id = uuidutils.generate_uuid()
lb_mock.topology = constants.TOPOLOGY_ACTIVE_STANDBY
failover_flow = self.LBFlow.get_failover_LB_flow(amphorae, lb_mock)
self.assertIsInstance(failover_flow, flow.Flow)
self.assertIn(constants.AVAILABILITY_ZONE, failover_flow.requires)
self.assertIn(constants.BUILD_TYPE_PRIORITY, failover_flow.requires)
self.assertIn(constants.FLAVOR, failover_flow.requires)
self.assertIn(constants.LOADBALANCER, failover_flow.requires)
self.assertIn(constants.LOADBALANCER_ID, failover_flow.requires)
self.assertIn(constants.ADDED_PORTS, failover_flow.provides)
self.assertIn(constants.AMPHORA, failover_flow.provides)
self.assertIn(constants.AMPHORA_ID, failover_flow.provides)
self.assertIn(constants.AMPHORAE_NETWORK_CONFIG,
failover_flow.provides)
self.assertIn(constants.BASE_PORT, failover_flow.provides)
self.assertIn(constants.COMPUTE_ID, failover_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, failover_flow.provides)
self.assertIn(constants.DELTA, failover_flow.provides)
self.assertIn(constants.LOADBALANCER, failover_flow.provides)
self.assertIn(constants.SERVER_PEM, failover_flow.provides)
self.assertIn(constants.VIP, failover_flow.provides)
self.assertIn(constants.VIP_SG_ID, failover_flow.provides)
self.assertEqual(6, len(failover_flow.requires),
failover_flow.requires)
self.assertEqual(12, len(failover_flow.provides),
failover_flow.provides)
def test_get_failover_LB_flow_no_amps_act_stdby(self, mock_get_net_driver):
self._test_get_failover_LB_flow_no_amps_act_stdby([])
def test_get_failover_LB_flow_one_amp_act_stdby(self, mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_MASTER,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: None, constants.VRRP_IP: None}
self._test_get_failover_LB_flow_no_amps_act_stdby([amphora_dict])
def test_get_failover_LB_flow_two_amps_act_stdby(self,
mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_MASTER,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '192.0.2.46'}
amphora2_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_BACKUP,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '2001:db8::46'}
self._test_get_failover_LB_flow_no_amps_act_stdby([amphora_dict,
amphora2_dict])
def test_get_failover_LB_flow_three_amps_act_stdby(self,
mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_MASTER,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '192.0.2.46'}
amphora2_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_BACKUP,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '2001:db8::46'}
amphora3_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: 'bogus',
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: None, constants.VRRP_IP: None}
self._test_get_failover_LB_flow_no_amps_act_stdby(
[amphora_dict, amphora2_dict, amphora3_dict])
def test_get_failover_LB_flow_two_amps_bogus_act_stdby(
self, mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: 'bogus',
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '192.0.2.46'}
amphora2_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_MASTER,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '2001:db8::46'}
self._test_get_failover_LB_flow_no_amps_act_stdby([amphora_dict,
amphora2_dict])
def test_get_failover_LB_flow_two_amps_standalone_act_stdby(
self, mock_get_net_driver):
amphora_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_STANDALONE,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '192.0.2.46'}
amphora2_dict = {constants.ID: uuidutils.generate_uuid(),
constants.ROLE: constants.ROLE_MASTER,
constants.COMPUTE_ID: uuidutils.generate_uuid(),
constants.VRRP_PORT_ID: uuidutils.generate_uuid(),
constants.VRRP_IP: '2001:db8::46'}
self._test_get_failover_LB_flow_no_amps_act_stdby([amphora_dict,
amphora2_dict])
| 48.903153
| 79
| 0.685488
| 2,558
| 21,713
| 5.485927
| 0.090305
| 0.094919
| 0.166108
| 0.111167
| 0.87337
| 0.860828
| 0.838951
| 0.816148
| 0.802038
| 0.773035
| 0
| 0.00814
| 0.230553
| 21,713
| 443
| 80
| 49.013544
| 0.831807
| 0.033206
| 0
| 0.692529
| 0
| 0
| 0.014829
| 0.006771
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.066092
| false
| 0
| 0.028736
| 0
| 0.097701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16bc4c6b913368122f9dd1856405e8cb45243db9
| 155
|
py
|
Python
|
TEMPLATE/tests/test_helloworld.py
|
Nikitha-ramasetti/python_project_template
|
5f70a086bad393c5314b5c0ced6d617fa284daba
|
[
"Apache-2.0"
] | 4
|
2019-01-02T15:01:40.000Z
|
2021-02-03T11:40:58.000Z
|
TEMPLATE/tests/test_helloworld.py
|
Nikitha-ramasetti/python_project_template
|
5f70a086bad393c5314b5c0ced6d617fa284daba
|
[
"Apache-2.0"
] | 10
|
2020-04-20T21:37:21.000Z
|
2022-03-26T22:20:15.000Z
|
TEMPLATE/tests/test_helloworld.py
|
Nikitha-ramasetti/python_project_template
|
5f70a086bad393c5314b5c0ced6d617fa284daba
|
[
"Apache-2.0"
] | 1
|
2020-06-22T10:59:24.000Z
|
2020-06-22T10:59:24.000Z
|
# TODO
from TEMPLATE.helloworld import hello
def test_hello():
assert hello() == "Hello, World!"
assert hello(french=True) == "Bonjour, Monde!"
| 17.222222
| 50
| 0.670968
| 19
| 155
| 5.421053
| 0.736842
| 0.213592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.187097
| 155
| 8
| 51
| 19.375
| 0.81746
| 0.025806
| 0
| 0
| 0
| 0
| 0.187919
| 0
| 0
| 0
| 0
| 0.125
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16ce43f5f1b2088d81281507577d854da66381ee
| 27,555
|
py
|
Python
|
img.py
|
sandakelum15/y2v
|
37d71ba3b7e5686435e2eaca3d7a9ced3f1875d9
|
[
"MIT"
] | 1
|
2020-04-23T17:07:21.000Z
|
2020-04-23T17:07:21.000Z
|
img.py
|
sandakelum15/y2v
|
37d71ba3b7e5686435e2eaca3d7a9ced3f1875d9
|
[
"MIT"
] | null | null | null |
img.py
|
sandakelum15/y2v
|
37d71ba3b7e5686435e2eaca3d7a9ced3f1875d9
|
[
"MIT"
] | null | null | null |
bg ='R0lGODlhvAKQAef+AAABAAgBABABAhsABhMFBhgEBx8HBQsNCjYEBh0OECsMDkEGCUkGCRQWFDgMDEQJBSoWFhscGlkKDkYTFT4XGmoNFCEjIEgcICUnJHgTFykrKFAgIRUvV4cSGiwuLB4vWEMoKSMvUyouVDMtUTEuVlsmKBw1Xo8XHjstU0IsUD0uTzI0MkkrTEUtTFErSlArTl4pSVgrTVorSJobHyc6XmcpSGErSWgrMDg6N2wpRZ0dIXQnQ4QkOoQkP3wnQn8nPoYlPJwfNocmN40kO50gMVczUpUiOF00NacfJqYfMD0/PJckNKAiLqggLLAeKnUwM5EnOLIgJkBCQDpCZqwlKrUkLkRGQ0NGZEhKR4U5PXE9XKsuOzlObrUuKpA3TLUuNbQuOk1PTLcxN4I/V3xBWHVFSKI4SlJObmBLZ2ZKZk1RcFFTULo1P0hVd0JYeFNWU5hCRLk6L1ZYVbk8QbI+SVpdWrtDMq9EV7ZFSF1dgMBDTKZKYGBiX2NlYsBKTrNOVr1SOWdpZqxVW8ROV7lSU8ZQWZleXmttasRVWW5wbZFkgcdYXLxcX8hZXXFzcMpbZLthYbdidn9xmXZ4dchgZp9rjXh6d351nrtpa8xkaXt9en6AfZ11lsxqbLFyic1sdISAnMFxcYKEgcpwdIaIhc90eMl4epCJptR4fZKIs4uOitB8fZuKpI6RjtJ+hdWAgpOVkteGTteCiceIh9KGiZaYlcmMkJudmtmLj9qQVZ+hnrqYsdmRk86VmNWVmKygvrmetqaopc+dndqan9WcnquuqtuipOWlXuGhpa+yr7muyN+lp9ypqbS2s9itsLi6t+OwsOKwtte0tLu9uuC1t7/BvvG7buS7vsPGw/a/bMbJxt7DwsjLyPrEcOjDxMzOy8/Rzu3JyejLyuLNyv3MddLV0tXY1O7R0OrV2Nnb2N3g3e/b3uDi3+7h4uPl4vXh4+fp5unr6Pbp6uvu6u/x7vvu7/fy8PH08PT28/r18/75+Pn7+P/8+/z++wAAAAAAACH+EUNyZWF0ZWQgd2l0aCBHSU1QACH5BAEKAP8ALAAAAAC8ApABAAj+ADvoGEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmyZMgOM6KoXMmypcuXMGPKnEmzps2bOHPq3Mmzp8+fQIMKHUq0qNGjSJMqXcoU6YyUTaNKnUq1qtWrWLNq3cq1q9emT7+KHUu2rNmzaNOqXcsWZti2cLdSCeVs27p8/PLq3cu3r9+/gAMLHky4sOG/7LbNYhS3sePHkKu+jUz5KJNd7vQd3sy5s+fPoPPmM+cJSeXTqFOrZjl5tWubgqbRC027tu3bh9UJ2/K6t+/fXlsDB06nGt6++vRpk0atufPn0KNLn069uvXr2LNrdy5NnWa/8mz+URlOvrz5osLPq541zy+6XpDUR0UyS5o8v+AEyd/Pvz9rqP5RtkUz+/CVzziYBBiVGLa0851e9Hii4IQUupZehWvNoc2DeUkTH4ZNmYIOX/jYAuKJKLJ1YYpkUYENX+3cwWJTsxwn2iwz5qijVivuyNU0fI2Dh49KhXKfXvlISOSSTBrVY5NV2cLXNlAiBUk7e8VDR5VcdnnTk14uBUc8ewkTZlF4pLNXNWe26SaYbhoFpF7jzBGnUKHMlpc+Cd7pJ5Nw/gkUJvboJQ9jgv7Uy17pmJbooykGCulO2uwlzaQ9sbGOXvrgiOmnCkoKak2CPCjPF6PqZMqD6oyX6qv+5okKK0zO7OXprDZtg2QouPbam6y+rjSOXvYEW5MtD/Zi7LKVAWtsPXpd2lgvzojTzrXYYruOM87cGhkSR/IzDrPkPuZssA96G9Ms27Tr7rvwuivMkCrhIUy8+MKrbhT2YrnZNr3Y2dIcveRrsLyEqPSFLQcfbEzCNg2blzmulmvxWuf6uhdvMpnyjmf1KCsMtJ29w2sUc2xDsmfyXMPSLOF2Zg8zMINmDzU21ZpXPPRe7PNZGfe6lxgznQOaPHPEzJk4jHhzmz7b8PpxaPX4a3OfM+nMjz4Q/+z1WEHjOrRMs3Aoc2j62LiXPvBM00wdcMddByzTTDOP2Xtuilv+aOLUpDU/XX8tOFdhzzp2TFLq9cwtjDfu+OOAPS6549UElk4rAGSu+eacB3KLOoIFM/nojKutVzKkj64nP+f4vVfgg8d+VeGwHg5T4nmVw/num+PNTz68B68mct/wcUDwyAPQgCPgmK4XLMnz7jw/jkTPuQbt5dU6TX/DLvv3UtH+qu0vERKuPW9Yn/mLfU2jPgB9OA+PLu8n3weZ7def+Td93VOH/rDYy/ay9jrwGXAq4ksV+V4ijr1M4n3s40sy3heMvgBPf8lTAjvyp79n9CUdGAygXkzhOr1474AodBKAZLdAlxhjL9h4nyb8IoX38W8v8zgEBqOnhBtCCIP+HuQLCAG4FxJyr4ApTOJREjiqFrpEbRFQXx36Yg8cqA8LBqreDq3nw7xgsIJCxCDoRljCvJxQiWjsCRNB5cSW6EovpFCfFfCnl2+8Lxl8ieEWrRcGDu3wg2LUyzsERkATpvGQQFnjp9rIEmHsxY7qKwdf3Kc+eOzFHn3Yo/rAmJdmYLAvE6wfDtyhl3bkDImITKVOFIkpRrJkLxe0Hjj4osXoRYAvnlTfIcpBD3p843/qwwApO/lJvmCufpPYyzpOaUhVOvNLK4ydK1fyxq2pQn2buCQf1NcMvkDPelaQJIRqyE29sAOD2dPLMd+XTL0oq4yAe6Y8acLKSU1TJYv+qiM2GfW+1XlRfZboyzetdwhiYTCCeVnnPt3JTDPO86Fuiebg7hkFQqwsHnKk4xCjxwfTvW9Oa3qfHPRkj4MaU39dfOcRmwnRlqqknpCiaBSMpk71bTChm+SLOt43S75Uo37D48ct9IfQeAQApXqpx02659KmwvRRMs1nXsChPm7U1HrD1AslZdmXn74vqPSrny6yhEEfKrWh8WxqS5+aKJkyIlz0kIP1brEz9YmCg1z1KVD1wg39TVEvGK0fFpO6VFSqdZ5sFZRMo1BNfjwwenTlR2Cjh8dJ8rQvidhrXtzhV7IKdi8uQ+sZD4vIxP5psVLlBySjNxs9Js8DN93+y1aj11O9uGMFmuUHZ+v317xsVH2O2IszCstS0j7TtH5a7BwKJRorWG82wbBeH/wy2+TVNi/FqF8fHqSPTdRPCnSMbv1Ayo/hita4iJWo4BYbBYnlRRPW44Y+Mhk9hGr1skgKRP0KuhdR6E+c/BDvR/eCE6ai97jq/Rp7mRFSysYSeQfI6prCQOEKW9jCAOaHVyHIl2vWD8ACVl/l9FJgwx64tAn2GnujwBfrJWO3yWtnbdBXvwiMUS/m+K9eQmw9vpS4uCc+JHLvtOJr9Dd6B3iG9RC6ss5QtX5y6EuO61dZfVhCf6D9sUODnMohx2nFqX1y8pQcPbUlzTPukOv+ePuCDzUv9MHWkzE/cEbcLXNZyCn+2YrnYCN6aFJzm3gQlaIgjHbI49CITvShm8yPONavj36h75
v11029mPe8d8azAVfcXk4pdItW1YuZbqK33DVAf9fdi6Stl83fUXovPYPnaDMNPi+7idOO1KcmsbC6fMy6JbkWDTDZOT1+uJbVoiEqrLWcVlor0dZt4rT5bHu8PbZCgDeZtl5y+b4DBJUf0rDRsaPnAVKOm7XLrnOznZ1CaJ+J01EYkV78u0eE7gsmVZD3xDAY2bzIgxA2msep37fB6qI7L+Ni9q/ZLTh3hwneYd7jGtJZjw/RJLX6eOz7lGBJhuqbH1EkOD/+DI48DTBXXDn5GyPYwPKWu/zlMI+5zGdO85rb/OY4z/nMxcDzL3yhCgx3Sp59Bm+L2taKO+w3P/pWE0xwKJT1Qyg6VEKNvYTcpiOvn9ITru5PDOLrYA+72MdO9rKb/exoT7va1872s/vBD3qYAxt+7oSgE8XhXoJ3FKxGvS36UB8mqkkDzYlBUqzNiFVXXNSz/j6ln0zdo1iE5CdP+cpb/vKYz7zmN8/5znv+851HBCIGMQcxAN3uQcF7l/T+Qr2c23pKOHmxasKMB80js/ul49JXAgnZap0f2xywXh6P1lI04vjIT77yl8/85jv/+dCPvvSnT33qL6IQcGdDxVD+zxPVc0nvUbCRO6pdPxHmhekzYUSTs6u/UOelHV14pV608fujvi+r+cBa8avP//77//8AGIDJtwiI4Adz8HPc131DdzHgV2r8QG+5xQ/3NjAfd1v6Uwwcoi6wVH/1k1VTl3J7YXwCOIIkWIImOH2FoAcImIA54X1VAn6tlxcklzx7cVYzEWz8gEn6Mwn+dGnUhCQcqD7gpRcfyGwieIJImIRK+H8E6AdscHosaBMuCCXgx2I/VD+cNGc0USN7wW3BJGFI4xKNFVbW4wj5YH/Wc1dEqBN/c4RL+IZwGIcDWIBzAIVROBNT2CRVqG/64GHqc2P8YEQy4YC6g2qwpH/+KzELe8FjyQN1abgXgWeEcjiJlLiEBfiEd0hPC2gxVYiDjIg8q/MORCMTjaUPWHCBfBGJLcEIe8F+6uOI0aMKkMiGIViJtniLI1iAYlB3mRgTecgkVciKO1Y/q4N+MBEKavNp0VMHJ8cPofUSwlhXd/RhswiCeuGGuJiN2gh9BKgHo9iLL/GLSxKMi0iMemGMLqFtuUN+1qMBscUP7UBIL0FTkkVl1Ohvdrh/27iP/Mh8KZiP4PhSm1gu5DiM/XSOMiENOORc9TNieWEPFgcT9Nhd7+OHPNRx/CAPO9GG/diRHbkITgiQ4CiORFKQefGJwVOMiMMXtaA/tbA20iL+E38zUH+2OXygTBtZi/2wkzzZkz65k/pAgj85lD4pfUS5k9B3lEMZDXBYgKhSE0qpD0qhlP0QLCTpIyYZYOZ4fjHRZK+HPJvQjAPUMXtBkzWZOTepF8aQk9fYCFRJlEH5f2+plM43l/3AfHb5k0yphIswCJgoE3YplUWRl71ylTuSlSjJOyo5j3vhZ/WzBoCokTVhCnuxU2e5Oa2WF2tJi22Zlz0Zl/7nmT9Zl3OJl6LJk/lgid44E4FpFISJK4apI4i5lbvnEsiyFzpUP5UCS4qmaEyjEmwwNfxgmZe5PnuxmdaYF8Z3mkAZmszZk6Zpl8r3nDy5hH05B7z4Ep7+KZhCIZqwOZDkMpsHyZUtAQnNuGE5xRmjZjXwoATFaZyiwRMcSZ0/mQ/0eZ+iGZT4uZ/8eZqzZw9UKZX9uZPyYJdcEps5Ip7qs5grUQWDlxfsYAH75TuDoQ9sEAUOqHGXuZu/I586iQGbUww72Veb4w4D2g+1hDy3IJoMmTkekJcQqDwYIAV1YAm6UA4n2p8tmjnzkKNESX+b8ww+CiUIOiMK+lwI+TKw5F3vgwH+tBmXkloaWpMNAEse2pmOljmisJMhFgY5mqLBs6KeuaMvapcxyjuTcA8+ep87CgA9uqY+CaSaI6Q5SqTgySxHenC1uRIOmJjB4wGAGqiCOqj+gGoFlhIFqbVaZ1mlekFnnKmcbomeSrCTGqoLXzpXLLo5ZTqXZ8o7pACn1NmmbwqqOymnmUOndQood7oseZo8DKoSewFj78k5K3CohHByilqTM9SoVwqp/QAPnEMP/SChmoOjJwqmvCOmeUmmMFpJpHqaovqspco5qHqiTVKkLNKqoJikK+F7s7o7tRotKrEy5nB1NUlezNCr/LCc/eBmAJAOwKo5FsCT+4ANolAHGLAGm/AMasqTnFMH/TBlmhMM/aALm7AJUwoAB7uv/fCvAcs5BNsPzDoNgaABb6ALo+qTZ1oN6nCKm/MNPVmv95qv+9qvDbs5O3lsIjqsm3P+D23aD9WQCCtQB8GADzwZrTspsviqr/z6k/cwDZtQBxagBI5QDPDgk+pAClIgBaqgDqYKANW6D9UgCm+gAXUgCtVgsze7OV76DYmADT55ratqLNqaktwKq/f1rbR6qFHQZB57luTVE/PZDyFWDYr6qf3gDukDrtrgr5sDsAKbORG7k5OVORHgkw4buAAQsTuKAWe6Bibbk2cKsmYJtjupt7yzAn3bD3vLo/3giLXQD/OwOfO6ow1gloewtZvzppjLtz35Dbi1OwdQDTxZDgOnOVcWpDypDp27OVZgDqqbOYlASbTbk2K7aXrxjTsRjVo5nnuKtjKotmsrrlHwN2/+W5Puxw9yq5P9UIiC64i0iw9tyjnGmrgQ25OFCwCH25PmuzmMaz3VypOT2w9DtTnTsJPimzw4yqSZow70i5kPqzl8ILGytJPRmr/IY6y/xTvA2w/6FT10eg/kxDsRZsB/S6zFy5PHCz5lq5hna4XRK72aE64dohJ/A4uapDbb25k7eXWiYJHx0A+uCAAHUAzBlzlysJPtO7Doyznr67eaA7jnS8Cbowu5qzmW+pPzW79zupMzXMM3DAA5zMQAALLwFcQwuzmf2qbcAIsrG61PbMObk8P9cMQAMAk8hrec0wDJ8MBNTLekmwxRPAkWzDsZvJMb/D0dvDuvCsKMJ8L+AEDC4KYSoeCtZ2kjg8aWvrqTEFgHN9wHO+nGAJC6XggA7HCyWKy4g9sP6fvDOvy3AczDRKw5+7DAyiq5nAOyVAy1kbw5lMw57OCF97sGm3O4M2ypL3tORVzHmtOjkvzKm3PJ7AiyUYwF/XAPnJO66MnK/TBsAJAI/VDJwjq+mXPHVamqyJsXyqsTzOunwfrBhizCghyTDVaTliBoK7zI/bBVEUB+K8uOmpDFmwMOmJw5Quy+PVzLiAvKmszLAKABecs5oavEqfy/utsP8CzPmgMOJCq49Zw592CRQtq4Ab05Ax2tCb3M4DC6m7O/nHPMnBPPT0unIa3QmQO81Az+ANacxyyUvD7RzbSJjuEsvePcrXtxmVmYrurKrhW9O+kA0psTz8vctzvs0DzZyfucyUPMrLIKAAOtsQW9ykKKzEFt0gCgDcQJAKrA0ZsDD7mZOdwwygDwovGqObDgz25K1Zoj1JyjDWWdOTg6pfvgmGvdDyMN1HU91GKtOW8AssaLzRzs0j3BvG8jN4Zt2Ccn06532Izd2I5t2IHAtlFgZHrx2JYtN5WWF4iiy
OvqljxJy5yDAzup1pnD1psDtkW9uPmsOZ780Pcsykwt0EO5xNSK16Vt1djA1c+c1Sftnprz08z61gBw1ns9D6QNAKatOdgAD4NaDvOAdJpzD3T+fdt3Pd3IA7ZtKgrC+pMsLU2CzRPM6xmKvTeFEZOJR96anc76wNMFuzvEbd3JvT6uHcpGTbg+nNT2TN+qLdYv2tRPjcofa9BNDN+4jdCa00O7s8wxHNycQ9yiSuDLbLk86Q66QKyb00slXd0ittdHO5TdPVF6wTE94V6dwSct8Tfo/Rf5wBJ8luJcp8jxkAmevZNipjlh3Q8Qzjmozc9DbN/6zL48js/83dOZ89/yG9W1neOn3a6bg8Kr7M9k3eBoPQ9Krtw5+w0JqzkYXtUabj20+7Ie/uGCky4/YZ6f8YwrcWYpDhijthIMRt4Q6RPuZQ6LMOP9gA/sCAD9WuX+8p3am4zUQK7UQh7bFj3bSK67fA4AYBuj/OvbuJc5xjzkwu3gnEPlJR3hvyrJFjDBmbPldd3lmpMI8DDqpA4PNrujra3BYv41KxOTPmEM2RLrso4t40B8LdEL6zDrur7rvJ4t23BCkLANvT7ssY6cPAEuRFjnPinJALuTib7jgi7KPs7a+A0Ar23UhK45Rs7Ihz7gl67j7a05IErDHmbhZwzlvyrlxf3sDrw5GqAO/MujBA7qt40PzXDv937JzBq2VYKtKeJes/c1t5kXwnB8PvnVADDAo/3tm0PUQS7tnHzfgZ7f/Tzk/m3oAS7Vto3cVt23lYzDM2zW6D7pU37+3PF91cctovHupvNe2yYvuuC+76re72MbLH8zgeRSTflACwbfkwiv8Bt/8iDr56ttuNV+7fud7UWO8ZoDsi2puy+/zCDb0JuDtbzzxZqa7ptD6asb9aks3CC78pbO5S5f0rp91UP+1zRvQKViKE/pM6uiF+qACD3Pkz8/8QBgCVYNvER/1BIPxBS/1Fl/8QQd4Gd6vw+t98sMvLydOTdqxyOv7tHKOYrPOeYg3B7d9Rle24mv9U2f9jNvpwfEoYPsM5rCKb2AfAdvkz15vZBcyUfLOcZc4/s97UaP97Mv+JpzAP3A20kM1ZvDsbFr5f3g+tHMOUd79qfa1HAd+Vv+P+XFvzmvj/xgX8aVftx0PG506q6B0PuwDPp4fKA1HyyEYiibXS6ppQ6EoPo+z/o8ScVQ7P476m1kWN8R/+PBS8PpUP9JT76VvLIA0U+gQFEADB5EeNCcwFsIDxTjg5DPwAMJAYDDZxGAO4FWEHroBy8hrI4J5/VrePBhxIMT6SW8Bc8jwpMJcbiDlfCZwGIOi01CKKfkQZAD+0VBmlTpUqZNnT6FGlXqVKpUZ8yomlXrVq5dvX7VOo3fWH7j5oBFm1btWrWh6JHV56nR3EZG+x2SaJRdRY0GywlU1RdhMKPxEkawG1iwQcL9Zi7+a7dfwcUAMOATuBfywDoWOeL+sHhvqEGQIhGSdGyyn2bBkcNUBnASb+Wd/e5h2ZyaqF22vX3/jnIV+HDixX/DiUeWnzDjzZ0/x5NOeTW6c+3ONjjRKLYGGiNgG8guQkKggwsftiuefMLGjwHgYHlQlGSCsKdt727xu1HKCAU6SgiDgdwrbaTRDDqpH+68A0+gahJawRLVzMnvoD50GsidzixqoJoBP+LtORGfE25EE098zhbl+NkGRRdfnAqSdpSLh5Lq6jIKOwC0Mwqen7CwQApHiklwIHVIUWIFTb75hr3zEELMriOTXLJJ83QzSJV9kuFDgz5qo68/CPmAJR3JfJwESCGJtEuXhMIQKCeE+vj+cDfTDkLNvdgGQlPNIYsUyBxSrFBCFXeCUa2fKa1QJR1tMDRKG1Xk0ECJQIKBxygCQ4Sx069K9DRUUasSS7lx8Bg1VRFDkUe5fGa5EUf6ZqW1VltvxTVXXXfltVdffwU2WFtVJRYqUItFVlQqsFmxnTuShVatWfJxVZ1FYhU2W2235bZbb78FV6BooT12XHNHnEMbfVaUBpJz35XKFHRWxKcccWK1Llx99+W3X3/5hZfYcgMm+LctmtlnxXzGwaTggMWwpZ11laOHG3pKwVfWfze2VRyOPwa5W4dFHXhkk9GaZZ4Vx0KnF3dPJhaJWaRpdWV16NHHmFI6eSTjWuf+0sdWfTTGNWha8/X2Xrp4BVpoojk2mlZ9ogh3G6FrhdnTkrPmOis6qqF2ZX300UYaas5GO22112a7bbffhjtuueemG21p1Jl4ZXvSwcceY2R5BZVRMrExY8MPRzxxxRdnvHHHHzccET2+6LryZLe2PPOnBJnmrZU/Bz100UcnvXTTT0c99bHUSecefcTBRRbZZVll58Ihxz133XfnPXfJKdc8eK2xEr54qJjYxZ28VWe+eeefhz70fOD5Bp/XeZk9e9o/oaTn3r8HP3zxF//dePNNxPz84qkIxZlt1gk7evnnpz96ePAxR7R6wsFee+0D78TtxjdAAhYQd+VTXwL+i5M+BTbwC4zARLDyUbvHlYIZzMjWBVdxuHx8a2z6yIc95NGOdYiDGv3zXwoDVwrCGdCFL4ThjRDYQBqyhYE1TOADGbFDHvbQhz0cxSgwljsLXtCIR0RiEo24wRhWhxKlcEUKpThFwNmuiVfE4vdmiEMueuWGXQSjU76gB0Rk0Yz4emIUqbhG7dUuE947YxzliK8thtGOUvniHe84xjLO8YxpZGMgZ+cKVATQj4eUYx31uMil5JGRXeQjIrMISEEKcoWdkGQmm6jIRzLSkZ3UXMhEOUpSltKUp0RlKlXJqax8EpSWW2UsZTlLWtbSlrcU2VZc+cqu4dKXvwRmMIX+OcxwcWWXvMwaMZW5TGY205m0NCbxkKnAZ1bTmtfEZjaBFU1oYW1E4qLaUaICTqmQ01PaRGc61bnOZXIzWd6EijnRAk55NqWeTrmni9i5T37205+jdCey8llOcaploPYs6DgT2ilZJkQyDrULRElpkFlR9KH+iSUALnqQjVq0ohrlFkYjKtJ/lnRWAS3WQeO5ULCodCkuRQpMn5NRixhFIzat6URBetGOclSVHhXITQciVPoAVVg5HSpSTbrUgaCUWAcVl1FiKtWksDKq96SnOK9KTnM2lSlbXahVj+LVcJJ1qgT96U774VG2qrWtOlVrUuO61p0atZQ+DWpb67r+V1rZ9Vc+xStdk8pUwsq0kdJMKUuVclJO0aeqrHzsWcsaIq7m86RTbSxlJbNYxb40rSPNK2gFi9OQSTSvgR3tYFEZWKNatLVzDa22gPpW0hZ2qU5VFa0eW9CsejaymF2pZMna27I+Zbi8Re5ufSvZrPjqteFybVyjK1rqqjZbFH3uc2tb3bVy96islS5ItctTkn7XurG1bUlxmyrdMjec7pUncY2rVfpy9qxm/WpC5SvcsPZXK3/la1EFo6vp1na82z2vXInaV9imFr0PPi94BdyXWmHXrXqFsF/pmtPF4AqvB05vP9c7Kqjqt7673ex7g6virtIXv/m172Q161n+xzYXwA4eKYU9HGDVgjjCDe6wrTQMYr/O1sgTXrBoXytR
H5+2xwMWcni9G2J2jlhUJY5xi2usUuK2GLMvjjF/t9zfGlcFWBbWF22fPGUHazhYQ5YygrerUR5fV7zhZXKceZpg535Yz1Tmp5VDheXfehmfnS30ifnrXhgner7L/cqb3WxnOReYz06GsGyB/Gccr9nS5n2wn+/M5iNretRyBvQ6BX1ORKt40b9FqEJf/Woww1fRjYa1VyTd4NMmWcDc/XSn1/xRHVd408DmNZrRTGxft/nCSub0j3ut1ChnmNepzuaqGdpqQ8fXxIxu9H7lW2tDu5rFLC03VXZt7GL+f3TPw750bN0cZGO7u9LJpnO+hT1t0yqYw3FusrP5Xd5643jS2L6mtmHkzW4fN7OyPneWvx1rWJe5nmU296H7vO8zw1a7AQ/twW/863szuLtB5TivgvzxaIf62gWHd6YRrk2Fv4jhEy8uinF+6FlHXLm4NrdYEYpfl3Y85b2ad53VzGzoHpu6Bxd1mi+8Vx4nHdW5ijp6RT7zZ9Z8mr3cuMz7TNQFNzvHTbc3Tvu9UblKPcckNbvAAWzauHPdml7/ejKR3vK9l53ga58w2kn+9pdvuO2CV7tdAS/tjqN2w1u3ezPxnveTERbykcd85kE2ecqPjKmO13zoRf8xznf+vmAmJfjoVb96fZXe9AFD/eVZP3va78r1r39X7XW/e9bfHvfm4n3whR953/8+WsNHfvJDXHzjv1P5z4e+P5nf/IJFUpMxpGQltR9IXEBDHr764AfzEUJ5jPAdJaSGMXjhik/A8fri4yT1jXdM+ZvM+u93Yfa3v38p4gIZ64iaYAm/sSG/djg/dBAHb6AGaFiGYfAFWqCFUuAeAcI/8pmc+lMg+sPA6iOjCsw/KOK/EJQiXjiHDuqXsamHEVqHcxCHBLwGaogGZjCGB0SFT2ghD8yY+NvAzNHAHYSX+8PB8dE/ESRCWfCGehClASS/dfAGaBgGXqCF2imFIPqETiD+nO5xP0nSQR/smh7kQnMBwiAMnyEswhCEhnUopQEkwHxIwfM7h3BQQGZ4QlcYnCw8pC38QpjxwjyEljAUw+8hwzLcv2UIh7GJJfGzB3tIQfNbB3R4w2uQQ1mQwCuMIzzkw5HZw0ssFj/8Q94JREHUPl64hnwIQF8iwHp4hzdUQGiQQWMgBl+AwlUYBUNyIUvURILJxFtMFU7sRN35RFAUpO6rh1JUprERoXdwQ2+IhhnEBVeYwiq8wu7RogvUxS5ErGo8GV7sRdz5RWAMpGWAn2pSQxAav/GTh3UIh2towFicRWnUHVvERnPJxXjsFG3cxsfpRm9co2E4Qn4CoUX+PL8VbEFvUMAGjEDuscPIoUZ6NJl5ZEgXscd7bJx81Mcp4gXvI0Z/JMd6aENVXEBmIAZeGAUKjBV4fEhkcciTNJGIlMjFociKTCFcGAYARDgUfAdvwAVMOhyTVElVScmefA6WbMnEeUmY9B9cOAd7mDljPAfuQRyeBEqSucaoHBehHMrDKUqj1B5v+L6a1Ad0WAaSLMmFpEp5nMqyTBarvMqMyUqtnJ0zzEjb0od3iIYhekqyREtyOcu8JBa1XMtYaUu3lIVhOAdDBDR9qIdocAULBB6+1EvHjBa//EsnAkHBrCReOMK4XCrEDAdayATGhMzHDE1kkczJnIvAdEv+XIiGd9BMk8qHc+AFngHN0SyWn6RNtShN00RNt1yGwgyxsVkHZvhMxoHK23QO2zROsMjNydxNrRyGUfzNxEQFxynO5FygvbROFFnOv2xOo7xIe2jNfpKHa6AFsdxJvMzOF0HO9NyK7VzL7oRJXFiGdzBBwqoHcaAFnWyc6mRP31jP/qwK97xK+IRJEkRCpgKhczAGd9xP9ATQEfnPB5UKAR1KAq1IXLiGrtzMd2AG/aROB5XQ48TOEC0OCm1JC9VH+WTNzayHZViFhFQc/iRRsIjQGW0KE5VIFPVGWiDB8EQnfZAHb3AF84xRELXR4ajRI1UKHL1HHfVGXAiHA/3+p/3BheE8ICNVUv8c0SxlCybdRif1xtX8p7FRUN6RUS5tpS1F07Tw0l4EU2DszTGVh2UYBTPF0jVNiyTl0jbtxDcFRcz0JyCNhlUgUuK8UzylUTVFVK/g0z/0U0FUTVLcJyANh1Ww0nc81EXtCj3N0kYVw0ctQ1qYz/pMJxASB14AnzPV1KfgVCX11CAE1TIchij10WbSh3UwBg/dHVVd1aZo1SN9VRyM1SK8SHmo1WXKh3dw0fDh1V49LGft0g40TccZViKUyRX90fEkVGbNVGi1CkX11qkIVg+sViKkBXQgVWzanyGFv24N16j4VRsd1wosVyLkyh8VB18o1Cv+bcx39SJw9denmFf8q1cRhMtsygdctUtu7deA1SWAdVimGNj3K9gQRIbCPNZf0od2UEwY3VV3jdhnDdn2lNZpZZyK5T9c8AZJFccglc0BatZwPaboo9matdnhK72b1dmd5dnMy9meBdqgFdrbeliuGNqjRdqkxaafVdqmddqnvSWmhdqppdqqDRmptdqs1dqt3Ras5dqvBduwbS+tmFmxNduzFVuvRdu1ZVunVdu2hdu47dm3ldu6tdvno9u71du9rb285du/BVyfLdqtCNzCNVzN89vDVdzFVa/B/S/GhdzINanEldzKtVzJc1wbu9zN5VxnotzOBd3QXaXPFd3+0jXdq81cMzvd1WXdUiLd1oXd2M2W15Xd2rXdXKHd29Xd3aUqsoVYnWOqe2iGSbACDVgDUqgGzJAMdbiFQ1ACJTiEYmCHVdoHUjiITBGIfZCUN/CAFXgDUpgG5ZWS5n3e6J1e3kXfXsldbUoH0LCIMFAHo8AH69WIWtgHVMIHxTAI7IWH+HiT8xWI+e0L+03fAsaV9cWmefAAwVgB7O2HTViM+TAlcJCDhMiUfdiQvniD+xUICBYMCTbgEMa4qihbwkoJg1ACUsAAhJBgc0iIPgAQhACHUXKHZvDf6+2HR0GIQ9ARANCGQHnhGD6IGRbhIq41eP3dL1ulCzGIA7j+3544iFvgFvc1iIV4EIQQDUTB4n2oEABADVzJCISI336gYgD4Bl0RYovIlFpIiHsI44OoBYHQ4oO4By4+DSPGYwT2lzkGgOmVkIP4YW25h4SgB0VJCDMRkwYQCNxoiV3JYB/uh31IiDHGlTS24H7QBIha4YOwhPo4CEXuB0bODjw2Yj3ul3JAiB/WAJrYFsPA4n4Ah4RYCCYmDYF4A4RYgV2RE4NIBttIFFxphltQBSlQ4weGqAU+iEMQCFoGgKK45YPIZVIWYVPmF3zgCwDAFIRYA265E4MQjRM2CI54jd3oh2cGPVp5hjueh4Tg4F25hzIGgEwBZ5Dq4k0QiHH+ruVyXjxp1l1q5pc/BgBLiOUolgxJhg0AaGeBuIdpYGiG3od7WGVovt9N0efUoxUXTubVQAgsMGjYSOhMRohMsZKDYIduBoBexpJmtuV95mfb9ed9aQaiiGlALuiDRuhbgWI4PhBnZmnJGOTdSAeEmISOroyEpl8cngyHuGYAOATlpWhzlr2WPt2X1hd1QIhACOmaPui
Epg96iOiDGOOn7mnJEGUAuIeBNghYIOrFMOpL7gd8gGqEIAXxFWuLlurapepw2QdkfhP62IdgAOzAFmzB5mrJYGO5rpN8jmte0V8AUAdsQIhi+OvBpmzATujGjue3ToTFaIyU5mm7vuv+2M3rcMFsg4jjcLFqKHHguj5nWkmGVE7ng/AQX8HsTIntxWgQ1o7q0A7d0QaX20aI+yjoyq7swh6IfehhlN7plQZt+kBrAGiGnAaAcphs4iZso6jtpEYId3CHhLBnz2bu1ubt1vXtbznshDhtu1hrwTBuB0kIPkho3S48u3DltHaTg3CH9e6Ly75kqMYCgViDjV7uihbv8V7d8vYWfEYIK/Brm27vdz5ku5DvXlmBg0iEVthiB8fuS7YAhKgDgcBqjJpwA79dBOeW+vYMrfboWTlvxpCMEd8VDwaAFZBxC4hkDR+I7E6ID++HED+IiQYRAt9tEr9cE98WHTaIRxb+bm4Jag8X34FQ8M9u4l6RbirmkV7RcQ8HcXbuhygPbwA4ACJ36dRVN1o6YQ2QbhDWln1gZgAwE8lgZg0AcISQgl7hhr4gBWDJbr4GAB5v82VGCDnvhwA/iDoXc9k1cm0x50kYaQCwgPb2lZmOYnyg9EoXiKO2qAq38F7pbo3QBT2/ZGZWgjk/CAbvB0wHKU03iEQ4dEQn86mgpXU+CF0w6YUQ5E0WjEkQCOAGAG5odGzulX0YD4sAEyy/5HnWBlRm4V1PCF93klaH3UQXlkbHhn34agAohm0xaY3QddtQgsWwAAfelc22iDP+lexuctxW6G8XjHCH9mh/dbSKpfv+NojpBWmDCARth41uh+UuTojZ7pV5DmdQz+p+kG6LIIV2Bgd/RwiAf/cDj3eFkqX4cGKDJ2Rt2XaL4Pd+SIce7nMi5pV7gAcxIQ1Iv5XsFghtUHAURhi76HiLqAOQf3iI912jVb14AAdsmAZtAGBjr1992Qd20AaG1gZ1gHSc13men3nRjviVAtrSBoA3KOSlp3pekfbI098IsAJHSAbRqPqvx92mny+wJ/uvvfqyR3veO/u0Z/vZW/u2h3vRe/u4p3viE3uNq/u8n9u7Hzq99/ubnfu/F3zCCvzBN/x/KvzDV/wq4/v8WvzH173EByZ1CIZDsIIV4ANSwIYnN5L+yr/8zN98+mBe54Ve6a0ViuaV6kXqSN7e7v3e8BV98i39nof8tpV8XNqHFt9oW89e3T8ILOD9t0Z1OG5v1NeV/C34/u2LMOh5Aa5fk699s298Gks1qD8IDXBg6zcI7B8IGdcINdeUINcVCr5kDF6MDe7+CI5+259+zko1jG4JgE5rIJYI+fdi+rcQS5b5xFZpXKnhGwYIAADg9dMm8OChQwcFauvXz9xCAH0cRQTn8CLGjBo3cuzo8SPIkCJHkixp8iTKlCpXsmzp8iXMmBuj0Kxp86bNGTNw8sQp0+W+iPj6YVtYzKGFhfdc3oq4dNNCD/v6NVXaD+pBqf2COd3+12AhrI1WouJLxsdDoGdTNVKMeJBgLaf4ItZyyFWpV7A/9/Lt6/cv4MCCBxMufLEnYpo6EyM2LPLNwnn9ki2sO2+hhZcKD2amGlHyZoGdqx6ULGphA4dYFvIRuzCCnIib1mJs63ZgP00RHWJYaMnh6YOp+60+2Nox8uTKlzNv7vw5R8Y9F0v3Cf0iVoHqPB/c9JD1yzpRHT4TZd48QfFZyZ9H36/P+H6QD65wfXvhN43NbqmS4pZgdgI55MFChzgE33ryLVTfdQ06+CCEEUpIWHU4UVdhTQ+SBkB+uh1URz/VLETKS2vEx5GJCXIURnzzHWTfQbUECEAwHd2DQ0T+BG0IgENfdecQiwm6KOCERRp5JJJJRohhTjsxSdODzSw0TT8pChRBP8UspEtGQd0XEW0XFSeQB/2kE0wtxWwn5nhnprmmQ2MJuZtGch6Ejzp0eeThW/18ExE78ESUTJwt0qkkookquiijKj2pmJNPPsjNQsF4qZQqCz3T5Zdg1hmVlAvdspadAHgQ6kGjFjrnQjACgKWgC6niESk5ArfQAQcUOFQ/pZapYKuNCjssscUi+WgUFzL5YJ4HqXJZjqEBwA2nnR4U5qoC5epWM9kCsG1E3fZqaLAZ+dpPrM7Sams/+Aw5Iq/jsvqisfXaey++gyGrLIYPQitQIs0uZI7+Egul02UwCSu88MLYyivcChEp4S0ADUS80MQPm+rQu66Wma5As3aU6UIE4ZNIpzV6+2vH+br8Mswxj7RvpMs6uI+uAinxZ0QiLhTPS6UCoM4+6h1Ej8ZDFx0R0ucCS6+544EMgMgckdznM9YCgM3KHB8qM9hhi20vzchCGNtBlEW0I7b7MPy2wg6XeoBDsETkjsZ092P3Qng73XLU601d9UZXC0RQcAe5445sXT9N5NiRSz75sY/yWyGEiQOQXcEAoHwQFhpdaq3c8e24ndOna8zy1xedO7hHhuM2ZOhVLlT7361TvjvvvSdX9qMQ6rJQb9+SnNRBk4iutUAOj7n+MXcHbff8r6rnXi5Gr0dEuEayE4S8QCD2Ewid10PtO/rpq78X8JJGeZscWroVVrVaOxwkmQ6pjj/01pN7vuuktr3Y2Soi4iPfQqZiPsitr4EOfCBJ2mczB1HKLaLwWUSO8hK05S962ukHB/sXke3wj3UH0Vvg8ge7kRVwIQf0VAm9dkII0rCGNjyM5WrWL2bdRhfluE01YIIggWhAf3d7z0KK6EEA4G2IAFCilQAgBY+ha4AsLFk/CPShA9HJiVBcyBRvKMYxok+CO3TQvzTFOLeUAyaaA8BUZGcaT8mxH7UK1sUARsUVWs1WTsxYFK3gkDvSK4+eIyMiExk5M2L+7kE4cws45uIWvL0EVQKZhjkMGUZLAgCTmiRPRLjBs4OoLIUb42PhbLUjbfxwIaIA5UJEGZFSKrKWtrwXI6sTIaMprh84cgpM7qHFL2lQmNYqZufuYwGCfEpwVuwjFtOhNa714x7JvM0yb6nNbQ4rl9KJ0BsBMBTbCAQDMgFHzm5jiXih80vrvAg4fAREjmhPVgTEYpY6RQraxPNLQeQmQAOaJG8yJkLDW0gY6haRPvxEHZZwywqSgS2HQlSiGUmHtMJnEXoK0J5X7JNDtME/nTUDWxh1Sx02KtCVsvRBBE0MBOlRDm1MQxtE64hMaWpTh10kHuDARk3ZkZx9sIP+pjW9KUd8ClRtCLWlTn2qc17aGKhStapWvaVUe3LVrXK1qw7MKk+8KtaxkjVsYLVOWdOq1rUy6qw3YStc4ypXCLnVJnO9K17zWpi6Zkivfv0rYFvCVygFtrCGPWxHBhsFxDK2sYVVrGMjK9m5Qnaylr3sWCuL2c1yFqqa7SxoQ7vNz4q2tKYVI2lPq9rVqi+1rH0tbBeZQ7PFtra2BZtrb6vb3Qort7z9LXCP5NvgEre41xmucZOrXMcgd7nOfW5fmgvd6VKXJdKtLnazK5Lrare73sUId78r3uyGd7zmhW55z6ve5KZ3ve4FbnvfK9/bxne+9n1tfe+rX9Pmd7/+/u1sf/8rYMsGeM
AGbmyBD6zgx842eAt+MH8b7D4IUxjAEp5ghTNM4AufUcMedmyCPyxir4Z4xCa2aolPrGKnpnjFLg5oi18sY6xyuJEzvnFZY4zjHd9Qxzz+8QN9DOQhp0/IRD7y7oyM5CWPTclMfnLMnAzlKeNLylS+crGsjOUtt7XGuuQymL/q5W+GucxFHnNBzaxm3ml5zW5uUJvfLOfmxHnOdkZOne+sZ32hGaZ7/rOx8gzoQf9E0IQ+tEsMjehFp0TRjH50BPs8VUhT2qWS1mqlM33cS4dV056mM6fR+ulRMzfUbyU1qvdqarumutWBcbSrIQ3rWDN61rT+RrStb03oXOsa0Lzu9Z5/Dew7C3vYcy62sd+M7GSvednMNrOznx3maEuby9SuNpavjW0qa3vbUO62t5kM7nAjedzkJrJikaXudbO73e5+N7zjLW95d+EY3SAHvvOt733j2xq5AMS8awKGP+xCGQZXxi7uAIbq7IETqXg4J/awBScEXN5E8MEHOKDxjXPAB0xAzBLI0AY3kLzkbkiDFyhe8ZWzvOUuf7nLLwfzmdO85ja/Oc3rfW9+8zzf/gZ4xekQCWAo4xenAIUiJs7wSjw8FZLQgg+WoHKcS4cJPwgBxzfuccQ0wQtqGLnJ3XCGMUyd6mY/O9rT3iS1s73tbn/+e7x13vO5/7ziTriDJ35h9DxcQQtNKDti7sD0VFwiDyRwAQ8A73Ym9EAEGef4B7beEyeYAQ1fD7saxvB3uHO+856vkMw/L/rRkx7mcp87z+se8CY0nBWgyAMNRFCDzUvHDIq4ROGnwIEU9EDxbW/CEFQgAsiPgAdNSMwWxmB5LjCf+WNPgu9LL/3p1zz01L8+9rPfk9Ojft+qn3cQdjCFK0yBBhyQPe0T44QfFGEK5TfB7nvveScQQQYk4Pjhpa7+JORABSb4//8VARBEn/YVoAGm2wEmoALiHPd1n8/9W8A5wRDEQNZ9QAwswfGpXxPsAApkHQnsAPR1HvClwPD+bVwIqIAREGAUOAET1MD9cZwK/IAKLiAN0qD11SAO5mDO2ZsDeh8EypsTNIEPqEDW7d4OEIEKUgERwEAJbpwFDkH6tZ0TLMEONKEJAkEG8oQTJMEPsMDjbZwI7EAU6iAZGuANliEapmG7NWAPfh+80V8NYF0FFl8I9kQTCMELFCEHjAAMYODbBeEPpIAecsARct0QtIAVbpwN6J8aNuL0naEjRqIk8gQbOqAbvhsVCAEF6uEHvMAPjGFNMEEOdGARfgAK1IARUIEUJoEPvMAXZl0MDIHvIYEQyMArchwL+EAWTiIvuh0k9iIwqmEldt8lutsGCuIgisAL7IARMAH+EpTdFL5AIjohCthADxDB380gyzkBEjDBEOQAC0zjxqGAD1CByjnBFi6BD8TAC+rhCNhAHQajPFLdL86jPdLgMKJeMbZbEtQAKQ6iKdaADwyBESxBEtAEFUABCtxi1injDvQAQWIjzbHgEhjBEPiADbTjIIaAGAZhEFikD4wiQ3JcCLgAEt4jSs5cPaYkS2JfF+SCNWSDTM4kTdakTFpDLABdvPXjPw6ixokACrAADBhfFADfQvqkE5JACrhADjCiy23hD9gAC6CAHCLl+Ykh8OXAC5CgVWpcSZ5kS4blvK2kWJal6NkBIKSlWq4lW65lHHTBvDXBD7hiV3LAB3z+gAjYABRQXBDIwAjU5V2GAAl8Isw1gRG4AAncZV1yAAv0QBMQQQ6MgGLWpQrkQDyaJWYiIJlJn0N8GW25XWdSX2gqIP3xQA2wAGqmpmquJgscIcUZ5g7IAGvOJmq6wBAgAcwpYQ64AG2ypgvYABAwAQv6AG/25mrawA+AZWYu55OQpftwZj94poOBZnSK3mguVnUuYDryAHd2p3d+Jw/4IU3QHxSAp3lypxAIJ8wFIRQIwXmCpxBgIMVRwRK453t+JxSoJ3PuJ4Y4p81Ap3Q+J39iJ6sNqIEe6D365w4B6GZO54Be53UiqIROqBoqaCNh52hGKIFimoZmRIGCF6v+dSaEgihhhWhf4VCJbmiKiigOkah1YGh2EuhhsOiLEhaKymiL3iiMxiiLhqaH4miE/iiFDimRSp+FfplGpGiBihqKRoeNCqmTOqmKNmmSriiPVudMVCmTUil40eip7SiXdumPSml0jOlMgKmGFqmarqnaHSmZZWeGXimmLemInmhf3amNKumUYmmdKqmPyimG2qmemqif9qmhAmqg5qmVLmmibuiIwimfMiqbTiql4pybptmXqqimSqqf4emicqqn9iin/qmekqqnbuqpOmqMqiqoFqqkpume0imiouqgVqqt3urKXWqnyiqkcuiqnlqcymqmhiik6mhisSqe9mn+rQ6qqSZrsAJrr55qh1KpsErrr7oormartrqbrk7VsDZrp7nomWoquO4plAopmprqob7qrzJrpDIquI6rsporia6rtZ4otm6rvu4r6OmQvAVptMIqtD6pqzprpoLos87rwQLqvMIqwLqrwRJsrN4rjBbsp5Yqn5Yrv24sx95Et84pu9IqyCqrvUZswVLryDLsrDrsqpZrnT6rxZLru8ZsvKpsq3Yszu7rx4ZrquZryJorxQKt0A6tvcbpzMosh4Zqy14pycIs0hKtzVKs0eYs1XLszm7ptLYrsRrtucLrmZIposprxprpz2JttPbso3at1Iotrx7tn2pp1cZttl7e7Zf6rI6G69Kq68rOaLG+66Pia9jyLcbebI/O6qYaa44OrtLqrde6bd8uq9xGrprSrYNSqMBeqORmrua+HeVOEJFe7pturuiOrtl1LpJqrYGC7qShLum2ruvKm+mGbpGqbtK+ru3eLuz6K+7uLu/27tnFru8Gr/AOb00AL/EeL/K+rvEmL/M2b+Qur/NGr/TyK/ROr/VeL6VWL/ZuL/dK6Ax0gA6Er/iOL/mWr/meL/qmr/quL/u2r/u+L/zGr/zOL/3Wr/3eL/7mr/7uL//2r//+LwAHsAAP8P92QEAAADs='
# print(bg)
icon = 'R0lGODlhggEGAaUAAAAAAD0AAEkAAEsAAFgAABQUFGYAABcXFxgYGBoaGhsbGxwcHH4AAIQAAJQAAJUAAJYAAJwAAKIAAKQAAKcAAKgAAKoAAKsAAKwAAMMAAMQAAMgAAMoAANIAANYAANkAANsAAN8AAOMAAOQAAOcAANoPEz4+PkBAQLwdI7IgJkdHR0tLS11dXWdnZ29vb3Z2doGBgY6OjpCQkJubm6ioqLi4uLy8vM/Pz9DQ0NfX1/Dw8Pr6+v///wAAAAAAAAAAACH+EUNyZWF0ZWQgd2l0aCBHSU1QACH5BAEKAD8ALAAAAACCAQYBAAb+wJ9wSCwaj0giS8drOp/QqHRKrVqv2Kx2y+16v+CweMzTsZLotHrNTh5i5Lh8Tq/b7/j8NXZo+/+ARwgzeoWGh4iJiotOMwiBkJFJg4yVlpeYmYuOkp2QlJqhoqOkpVScnqlqMKatrq+whzCqtEYrsbi5urtcK7W1NGMBDRYZHiMoKcrLzM3Oz9DR0tPU1dbX2Nna29zd3tYoIx4ZFg0BYzS/nidhAxEbJN/y8/T19vf4+frOJBsRA2FOqIPk4gsBDCD2KVzIsKHDhw1BYCDwxcXAP4S6OOgAsaPHjyBDQuzgwMuMi2sK2OhCQYTIlzBjypwJTQSFLjYKoEyi4Ab+lwrxaAodSrSoPhIVuNxQsNOIAhxbHoQwSrWq1avUQjzYgoNpUyEFfGYxwAGr2bNojXIwoOWGzq8rs0gokbau3bsfS0jQYuNrxisCNOAdTLhwPg0CspxEWRALgw+GI0uenO0DgywWB7LDAiEo5c+gQ6cgASGLQHVZJohezVryhCzqgl250Lq27bsXsKSjdWv27d/Asea+4ksVFtXBkysf+vqKKlZWICyfTh1maSuzOiG4wsBz9e/gGZK4bOWRpL9TBEAOz759vg+Jqyz+dEWw+/v45Wm4Yj4QeikS5CfggNrsJR99VRhAF4EMNghNCWxV0Z8f/0VRloMYZpgCB1b+zNfGAVY8oOGIDm5VRR9+wFHFVCS2KGAIVsTwhxUVuGhjfklV4QcLVnh344/fkWDFGWwwQQUFQCYZ3k1U6MDGAla4pOSU04lgxQJrvFCFA1R2uVxJVLywxg5VcOTlmb91UMUOa1RBAJpw3kYRFWqoUAUGcebJGgZVqJBGhU8kpOegn4FwIBpVDEDoopQBRCeiVETAqDZiTLpPBDomkUAVG1g6TR6ezrNBFQkkYUIVPoaqSKjdCEmFCUm0QEUA+qxaja3ZXNKNrp9a0tA5U7SQhAxUNICPr9Ege6smuTL7TCgLNUCFDEnUQIUF9jjLjLbQlLLsKNuSso8FVNSQRA7+VGRQjyjhctuuKdK0ooy8+WRARQ5JGCmFB+tCuwy7zeTSLbwp0IuPB00mUcUI9IhbMLjz8hIwL/7eM0KmR1SRzDwOd0xxE/9+nEk+KGBsRBUNi/wxACy37PLLLM/xsMqY5GNyEShzTHMsMPfsc88702zzoxlTkXLQpPys9NIvIy3xsUSfbLTOUzBttdJfXK01zGRs7TXQcXwtdstdjD12zvfcTATa31Rh9tVZv820GHKb3XXdWpeNN9xTpx01zn237fbeYOtNeNNgHH52GIovbXjjhUsx9BQKB+6NFZCT7UXmMW/O+deMf+7y46IDwHa/lCNx+q6Dc056456X7nX+3LK/7vrqVEtR+RSoV337Fp/HLvvstv+exfCm4y6P2kMovw3mxh8f/RXI281F9cAjb8Xkuqtu+eWtQ1783tlXv7gW2KOv/fdHd180771HEfz1mdNv/vlYpC/98M4Lnvr7kouf/OpXPvGp737Wy9/69lc79uUuCrsLYPukMD8Gws6CCBTbAUtXQA46cHl/W9sHnxe+C2LwcCfMIOgU2MAUEhB+UPuf1GA4wQEa0IV1w6EKt8ZCD+oQhf0DnwwBR8MH2tCEPVTcD3fItxJWMIlPlKDfhijCIoKQCtPDYhaPyMS3UW+BX2yhFWv4hAhGIYYUfGEYkejELuIvjfxbIvn+Rug/981QimR8wha5CETouTGHbdwjHwW5D+YJIYgk1OIN/chG3/0RkIr04RpFh0jWUbF5dEykIxu5yT4G8pErjCQloRhFKBQyhJgc4xU7SThSzpGRoPSiKEvJSkHy4JSXPGQmNQlHJU7Sk7X0pR4lOUg1fpKTT8ClHYmIxzw6wZjBxNsvgQkFYg5TjMcUpiqNWEbvbbOOxZQmLFs5TXLOUpvVjGM5F3lGZULQm810ZhOgGU65uVKc46TmNUe5TmSCzJ1QMKMpuddLfe4TnQU1aD3xuVCFHpSWTlCIIX9QSV429I0X1WA/7XnPV2aUoeekZ0QB2k0AtpOgH+VhNhP+uFKW5tOcKeXoS9k50FqhUpffBGdMmxhNmbYUoyH1aDqx2VOH3pKkThBoMm0aVJ/ulKdNlaUcgTpPdf4UkjnVaUnveFKUPlSoQzXqM2l6VaqCMaowjacAt8rMrqIxoSD9alyL6tKZgnWs1nwqVf+J1CYodaRMpatG4TpXvVqtg/60qmDvCtjAvtOkNfWqXJ06WanalbJlHaxhiZdZy7r1rQGF52enuFiVEhazp8XqRlVbWbGeNbKS9atoYQvasKYVr2KtKlnRiloevNa2EOWrY0ML2aX2Vbf63G1qe7tc1uKWn6Vl7lGP+1fhDhe4zlXuZh23Qe0qdrt5q6gll1n+RbVmq7NzA2/kLutZ9jL3u9jV7nSvm9TZGve4vkVofvUbXc6u1rnPtSV8G0tf2RaXwAXe720VzNj+mva/7yVqfP053wRXt8IJZnBcKaxen3XXuxJu7YIRXNv6Hti6GWajfCcM4A5z18F7RW5wMVxiA3OVtrGVMUg57GKudXTEOhZpkOXLkImKV6si9q+GWwxjqPJWukum8IBJTNrH3vi+EkUv1pKsZAhr9slQnvKUUVxjHlzYIe59cJTr6uVQarnLPd5ycxtM4zKfGc1gDu+a2ZzmLzcZzixW6JgbYuRdynPPg+VxnEfX5zCHGNFArnOViXtlKucY0pxVdKDbu+j+9f4Z0JguLJntbF9LlznUeuZvo0E9ZEWL+dGSPm8uKWroQ/dR1aUYM5d7C+tYr9XElR51is2J61H8ds4t7jWhb0rrrMr60z4utrF7vWtOt9qoy571kbkZYHtKWxS6rjafUW1tYU+areUdbZbzPLdva+LYyM4urPGs7VrbupXYnhkmwi1uNg862+RNpXmHTb58v5vf/fYzucvt61/buK04XsiqsZZvfTNCf1PltLIBbmWIY5njnRY1hi0Bb3YDdeNFZva2ud3tLm7vWYrAeMb9ndePW7jU5j5uLDtnbyGafNxvVvO1I/2QQjv71Dt/ucQnzvCfu7nluC66ynt+70f+Kn3pQWd11p288BhLvd5HP3fCZ9xwlkN9xU4XetcVbmqxPzzdEU952oks97lrnelwnje9A47Tga8b2hX/O+DV/mFO1rztz+443G0O8k2XvOyrHDzXTbn1w45d61/ne7P9LnjH67rukp/8SCuf3rNHGvJmv/NH8E5
h0Mdb5P8k/YuHTmeIGJ3z+HXj1TO8dqBPl/V6pr3IbT/1sCPd5StvluwtjzbgQ1Xve1e8wNUd/ZATHvFIvjzbG+t85gufycQHO+5zv8Pdn7r3Mbbu8j2s/eBTP8eqX/36n/7+yIfe+82cv49ND/vwaz75ied5ttR51udpn6V/iMN/TIZ6qYf+cwx4aSpkfgFYgPvHPgjIaAooXf4nfX1Xf64ngBxGgCC4gDTWfbOHfu4Xdzd3Yg94fAgkgYeGgvSnbiY4e4fXgtlnZg4IEzU4g9g3XvdXeg7Ugz/TfvingvC3gy9BhCmIhD4XhCc4cBeoOTIoejj4hJTmcT8oghlYbLwnc980hTx3g1cIhFm4eFvIhVW4fTlnhhSYgHQkhsmzhlHohG6ngywYE0wohONHKXsoZ5kkh13oex1xex7YeEaIeZdGh1bYhn+IgTfoEYZohx+YiNfXhhb1hhi4S49IhZFYiMXXh1gHhUUIgL3SiRXIiWI4iF4HiuJ3iIjIik23iGAIixb+N4Kh9IkbeIbTR4mVKIuEeIffR2eYGDKkCInQZYsOh4fBVoZlVnK0yG/YsIrDmH6SGIrKOIqvF4wTiItN94CoCIyX6IzcMImMJ39214jCWI1QBo7UmIzZ2IB5KBOoSIXG9y3p2ITxWI+MWIr3KE/xx4PUSHWnmI/6eI4FSYH9yH7/KI/NSBP8aIoDI4f2SJC3aIlFqIu7iG69iJDXaJD+2JDJUo+baJEXKY55o5GZx4GbF4/a6I2X2I0wyYbFOJHbCDoqWX0c2YG+qJMoaYUy+ZN354zhCI89uYwBKZDHWJFHGS8kWZKiCHMgCTRG6ZEr+JAQOZUGmIYJeZO+R47+JymUjlOVXAmQSjgTAxmVI0mRPCeRxuiVKTljIGGOZfmSGMmQaimVbDmHJjkxS2k6ZAmW5YiNTWmXYlmHdekMbQZlyxg8gTmXhGmVrgiXBwmWi/mNghkxWjk6j4mO/9eXOdaZWKiJbSmSNkmZTASDv8iMWliTPsmOPuiaevmXcAiafkmbA+iZLOmWzyiXbkiaWNCbO8eHLumQrZmZjembg7mXVxCaw0mcknmVxykUaVmcYXmYjSib1rCXUQeZr1iYaiiafsiWwemczwmI1ml/O9mS4Klzyql8mxmSpkkNT9l63vmZ82meQoaDP3Z62nkN9dmdusmLPBmdKzmTQDn+nvG5le2Jj6iZQaq5mkm5hAvKlAb6lrhpoXXQlXf5efe5m7Z5h+K5nfWZCGv5oC/Im7/5dh2ZmO5JSNNYooswmwgKoSG6oqyJhv8Znu9Jn5eZfnigmBUKox9KoOx5obG4kKnYkz9Kk6BymyhqPiqKoxMqEtXpi00am4cApaRJdCFBlzv6ovsJoDKqLJqZoQ5lpZHpogk2oieKpu7CnQuopt+JpKvppqcZpdDJCFzaoYGJnJkIbNOZlbRZSVk6lIbQp9jpoUW6nlOanETqlEMagZYToOBHp/iZlzwaqXnapbVIYJbKXBRap2yqn9rlo5NKqUUUqgw3qpmanqZqn5L+iqb3szpy2qqYCqL5KZxjyqGLmnTfc6vWmKtG+qjyhKeKCpvnuaQfJ6w06aq6qqlieqq+qqzLCpXNmqrECK3FeqOQ2quz6qlSGqzOympK+aoNGqsC+qY1uqy2Wq4xya2O6q3H2qM06qdWR67aeqnEOq+7KqL2KqS0WqsWuK+iKq+CqqOAanbI+i7i+qlkBq+Vea7RCqu8Sq3h2q7niTsSe4TpapwKS52Tikgdy6lnqqe52a8J26Jh2qYBm6xKmq9jxKqKqLIsWqClerGyyq74+kccW7Lo+bEgy7ILq56/yqANB7QYC7PXaqxUepb0OLKBaLAEq0pKi5cWO7Q427L+6pqm1RqzPjuEVMuGerimXKuz69qpRwtKynO1zGqnSTiPaCm1WeW2XiuwA1uzNpujRCuyhTq1eQuxdUaz6kix3fqvQdmwGKqxz9m2dquhZ2uWchu1f1u3Yyu4PLu2Xmq4/iqtLmuyePuw0Ki21gp9CHuzR+oEG0O5EJOxTgMGmasyMVEyZssDDDO3DuO6r+sFsSsyMXExzKYvUcAvuNu6vbu7WnC8FBMTCDMFToIE6DIF6kKoFfO1yNucuvsxMmEvU4AvSGAtU4At1Osuynu9ceg0MkEuU2AuSEAsU2AsfjsyZGq+W+CgvisT0jIF1IIEsjIFtBK/NTO/9JsF9qv+vTIBLFIgLEhwKlSQKpw7o/A5wOK1MzPhKlMAK0iwKVTQKURhpgoqwYVpwDMxKlRQKhcmKUaBKzmIvCtsvDOBKcy2tTygKKxSw2bhKLMmBIDiBIJiwz5cFIZyCmlgJ1SAJz98xELBJ1TgJ2ngJkj8xDIxJzk8BGRCBWYCxVjsEWpCBWyiBlpCBVySxWL8EGAyBWKiBlBSBVIyxmysD1ZSBViyBsIbBUjSxnZ8D0zivG3AI6hyx34sDxY8BUTCBjTyx4bMDTkSw0egIlTAIof8yFkRI38AIlUgIpB8ydFgIlSAIhRiBReCyaCsDBxyKH+wHQmyIKF8yRBSHpCww07+ECCpfMkGIsQIUgX2EcuGvB+sHAmu3ATqgct/DB8doh3c4cDADMXjwR+eAB1VIB3HzMbXUQXZ4QnH8cxi3BxWwBtYQBvW/MTDYQXFoQqyYQXc3M0+/M1VsBu1kBrmbMPYnM3qsBlX0BntbCmkYRoX0RjcsR71PCiWgRk70ctPEBj9rCeIoRhwoQVzUdBnohd88RU/EBZaQBYMTSVr0RZv8RVPERWOXNEuohVc4RUQ/QM98RPG7NEEghRKIdIjHdFxsQUtgdIOYhM4kdEtPQQCPQUbIdMCQhImcdNIoM9ccBA9zNPVIRFSzAWZAdRHIM9e4A7wYNTB0Q//EBBMnQbP4wwGw1AMx7C6Ul0Y4TAO5YDAYKDOV40GvQHCar3WSBPOZ70KbB3Xcp0L0/zWbAAKc53Xeo0JqGDXpZzTex3Ygj0Gfe3XgYDXg53Yiv0FhW3YkfAGix3Zkl0FfODYv7AEk53Zi20Glo0SC/ACVazZos3WO/ACcdzZX6ECgD3arM0LM8DEqH3VCWACLSADNZADc9zaui0KOpADNSADLWACJhzbxF3cxn3cyJ3cyr3czN3czv3c0B3d0j3d1F3d1n3d2J3d2r3d3N3d3v3dXxEEADs='
# print(icon)
| 3,936.428571
| 20,783
| 0.968427
| 812
| 27,555
| 32.8633
| 0.98399
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151064
| 0.000617
| 27,555
| 7
| 20,784
| 3,936.428571
| 0.817961
| 0.000762
| 0
| 0
| 0
| 1
| 0.999201
| 0.999201
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
16d71b2013ab2ae3fba259fdad70b0eea1f87f00
| 6,277
|
py
|
Python
|
examples/ml_examples/test/test_tensorflow_cnn_examples.py
|
poncos/deeplearning
|
0874b22dab05117bcf39ccb4895d513ab6f87861
|
[
"MIT"
] | null | null | null |
examples/ml_examples/test/test_tensorflow_cnn_examples.py
|
poncos/deeplearning
|
0874b22dab05117bcf39ccb4895d513ab6f87861
|
[
"MIT"
] | null | null | null |
examples/ml_examples/test/test_tensorflow_cnn_examples.py
|
poncos/deeplearning
|
0874b22dab05117bcf39ccb4895d513ab6f87861
|
[
"MIT"
] | null | null | null |
import unittest
import cifar10.cifar10_loader as cl
import cifar10.tensor_flow_cnn_examples as tfe
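# Smoke tests for the conv2d and max-pool example helpers in tensor_flow_cnn_examples.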
class FixedLengthRecordReaderTestCase(unittest.TestCase):
def test_static_conv2d(self):
conv_result = tfe.conv_example_static()
print("Convolution result: ", conv_result)
print("Result with shape: ", conv_result.shape)
print(conv_result[0, 0, 0])
print(conv_result[0, 1, 0])
print(conv_result[0, 2, 0])
def test_conv2d_image(self):
print("test_conv2d_image")
cifar10_record_list = cl.load_data_set()
image_data = cifar10_record_list[0].payload.tolist()
print(image_data)
conv_result = tfe.conv_example_static(input=[image_data], filter_var_name='image_filter1')
print("Convolution result: ", conv_result)
print("Result with shape: ", conv_result.shape)
print(conv_result[0, 0, 0])
print(conv_result[0, 1, 0])
print(conv_result[0, 2, 0])
def test_maxpool(self):
print("test_maxpool")
conv_result = tfe.maxpool_example_static()
print("MaxPool result: ", conv_result)
print("Result with shape: ", conv_result.shape)
def test_maxpool_image(self):
print("test_maxpool_image")
cifar10_record_list = cl.load_data_set()
image_data = cifar10_record_list[0].payload.tolist()
print(image_data)
conv_result = tfe.maxpool_example_static(input=[image_data], maxpool_var_name='maxpool_filter_1')
print("Maxpool result: ", conv_result)
print("Result with shape: ", conv_result.shape)
def test_maxpool_customized_input(self):
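# A 1x16x16x3 input of ones with a few 2.0 entries, so the pooled maxima are easy to spot.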
maxpool_test_input_array = [[
[[2., 2., 2.], [1., 1., 1.], [1., 1., 1.], [2., 2., 2.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]],
[[2., 2., 2.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.],
[1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]
]]
conv_result = tfe.maxpool_example_static(input=maxpool_test_input_array,
maxpool_var_name='maxpool_filter_2')
print("Maxpool customized input result: ", conv_result)
print("Result with shape: ", conv_result.shape)
if __name__ == '__main__':
unittest.main()
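# A minimal, self-contained sketch (an assumption, not tfe's implementation) of
# the operation the maxpool tests above exercise: a 2x2 max pool with stride 2
# over an NHWC tensor, written in plain numpy.
import numpy as np

def maxpool2x2_sketch(x):
    # x: (batch, height, width, channels) with even height and width
    n, h, w, c = x.shape
    return x.reshape(n, h // 2, 2, w // 2, 2, c).max(axis=(2, 4))

toy = np.ones((1, 16, 16, 3), dtype=np.float32)
toy[0, 0, 0] = 2.0  # like the corner 2s in the customized input above
pooled = maxpool2x2_sketch(toy)
assert pooled.shape == (1, 8, 8, 3)
assert pooled[0, 0, 0, 0] == 2.0  # the 2 survives pooling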
| 55.061404
| 111
| 0.339175
| 1,026
| 6,277
| 1.983431
| 0.045809
| 0.742998
| 1.110074
| 1.474201
| 0.834398
| 0.774447
| 0.735135
| 0.704177
| 0.704177
| 0.702703
| 0
| 0.182805
| 0.295842
| 6,277
| 113
| 112
| 55.548673
| 0.277602
| 0
| 0
| 0.717391
| 0
| 0
| 0.048749
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054348
| false
| 0
| 0.032609
| 0
| 0.097826
| 0.228261
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
16ed432d9aae4132c02b69837ac4c613aeeff8e4
| 53,593
|
py
|
Python
|
src/models/model.py
|
zsLin177/IRnet_dynamic
|
ad0dab9938e027f00a219a1a7e2953648d0229b8
|
[
"MIT"
] | null | null | null |
src/models/model.py
|
zsLin177/IRnet_dynamic
|
ad0dab9938e027f00a219a1a7e2953648d0229b8
|
[
"MIT"
] | null | null | null |
src/models/model.py
|
zsLin177/IRnet_dynamic
|
ad0dab9938e027f00a219a1a7e2953648d0229b8
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# -*- coding: utf-8 -*-
"""
# @Time : 2019/5/25
# @Author : Jiaqi&Zecheng
# @File : model.py
# @Software: PyCharm
"""
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.utils
from torch.autograd import Variable
from src.beam import Beams, ActionInfo
from src.dataset import Batch
from src.models import nn_utils
from src.models.basic_model import BasicModel
from src.models.pointer_net import PointerNet
from src.rule import semQL as define_rule
from src.rule.dynamic_oracle import adjust,adjust_sketch
from src.rule.semQL import Sup, Sel, Order, Root, Filter, A, N, C, T, Root1
def lf_to_sketch(lf_seq):
sketch=[]
for action in lf_seq:
if isinstance(action, (C, T, A)):
continue
sketch.append(action)
return sketch
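# Illustrative note (not from the original file): lf_to_sketch strips the
# schema-grounding actions C, T and A from a full logical-form sequence, e.g.
# [Root1, Root, Sel, N, A, C, T] -> [Root1, Root, Sel, N], which is exactly
# the "sketch" the sketch decoder below is trained to produce.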
class IRNet(BasicModel):
def __init__(self, args, grammar):
super(IRNet, self).__init__()
self.args = args
self.grammar = grammar
self.use_column_pointer = args.column_pointer
self.use_sentence_features = args.sentence_features
self.max_time_step = 40
if args.cuda:
self.new_long_tensor = torch.cuda.LongTensor
self.new_tensor = torch.cuda.FloatTensor
else:
self.new_long_tensor = torch.LongTensor
self.new_tensor = torch.FloatTensor
self.encoder_lstm = nn.LSTM(args.embed_size, args.hidden_size // 2, bidirectional=True,
batch_first=True)
input_dim = args.action_embed_size + \
args.att_vec_size + \
args.type_embed_size
# previous action
# input feeding
# pre type embedding
self.lf_decoder_lstm = nn.LSTMCell(input_dim, args.hidden_size)
self.sketch_decoder_lstm = nn.LSTMCell(input_dim, args.hidden_size)
# initialize the decoder's state and cells with encoder hidden states
self.decoder_cell_init = nn.Linear(args.hidden_size, args.hidden_size)
self.att_sketch_linear = nn.Linear(args.hidden_size, args.hidden_size, bias=False)
self.att_lf_linear = nn.Linear(args.hidden_size, args.hidden_size, bias=False)
self.sketch_att_vec_linear = nn.Linear(args.hidden_size + args.hidden_size, args.att_vec_size, bias=False)
self.lf_att_vec_linear = nn.Linear(args.hidden_size + args.hidden_size, args.att_vec_size, bias=False)
self.prob_att = nn.Linear(args.att_vec_size, 1)
self.prob_len = nn.Linear(1, 1)
self.col_type = nn.Linear(4, args.col_embed_size)
self.sketch_encoder = nn.LSTM(args.action_embed_size, args.action_embed_size // 2, bidirectional=True,
batch_first=True)
self.production_embed = nn.Embedding(len(grammar.prod2id), args.action_embed_size)
# C->T and T->min are not put into the productions: once the previous action is C the next must be T, and after T the current subtree ends;
# checking the grammar, prod2id contains neither C nor T, so production_embed has no representation for C or T
self.type_embed = nn.Embedding(len(grammar.type2id), args.type_embed_size)
self.production_readout_b = nn.Parameter(torch.FloatTensor(len(grammar.prod2id)).zero_())
self.att_project = nn.Linear(args.hidden_size + args.type_embed_size, args.hidden_size)
self.N_embed = nn.Embedding(len(define_rule.N._init_grammar()), args.action_embed_size)
self.read_out_act = F.tanh if args.readout == 'non_linear' else nn_utils.identity
self.query_vec_to_action_embed = nn.Linear(args.att_vec_size, args.action_embed_size,
bias=args.readout == 'non_linear')
self.production_readout = lambda q: F.linear(self.read_out_act(self.query_vec_to_action_embed(q)),
self.production_embed.weight, self.production_readout_b)
# manual linear layer: the first argument is the input, the second the weight matrix, the third the bias
# (action_size,len(grammar.prod2id))
self.q_att = nn.Linear(args.hidden_size, args.embed_size)
self.column_rnn_input = nn.Linear(args.col_embed_size, args.action_embed_size, bias=False)
self.table_rnn_input = nn.Linear(args.col_embed_size, args.action_embed_size, bias=False)
self.dropout = nn.Dropout(args.dropout)
self.column_pointer_net = PointerNet(args.hidden_size, args.col_embed_size, attention_type=args.column_att)
self.table_pointer_net = PointerNet(args.hidden_size, args.col_embed_size, attention_type=args.column_att)
# initial the embedding layers
nn.init.xavier_normal_(self.production_embed.weight.data)
nn.init.xavier_normal_(self.type_embed.weight.data)
nn.init.xavier_normal_(self.N_embed.weight.data)
print('Use Column Pointer: ', bool(self.use_column_pointer))
# def select_action(self,t,action_seq,probs):
# '''
# Args:
# t: time step
# action_seq: list [[]]
# probs: tensor:[batch_size,num_rule]
#
# Returns:
#
# '''
def forward(self, examples, P_dynamic=None):
args = self.args
# now should implement the examples
batch = Batch(examples, self.grammar, cuda=self.args.cuda)
table_appear_mask = batch.table_appear_mask
# used to mark whether each column has appeared so far
src_encodings, (last_state, last_cell) = self.encode(batch.src_sents, batch.src_sents_len, None)
src_encodings = self.dropout(src_encodings)
utterance_encodings_sketch_linear = self.att_sketch_linear(src_encodings)
utterance_encodings_lf_linear = self.att_lf_linear(src_encodings)
# added in this implementation: an extra MLP on top of the BiLSTM output
dec_init_vec = self.init_decoder_state(last_cell)
# used to initialize the decoder LSTM
h_tm1 = dec_init_vec
action_probs = [[] for _ in examples]
zero_action_embed = Variable(self.new_tensor(args.action_embed_size).zero_())
zero_type_embed = Variable(self.new_tensor(args.type_embed_size).zero_())
sketch_attention_history = list()
if(P_dynamic and P_dynamic>=0.5):
current_sketch_objects = [example.sketch for example in examples]
action_seq = [[] for _ in range(len(examples))]
for t in range(batch.max_sketch_num): # is max_sketch_num appropriate here?
if t==0:
x = Variable(self.new_tensor(len(batch), self.sketch_decoder_lstm.input_size).zero_(),
requires_grad=False)
else:
a_tm1_embeds = []
# representation of each example's last action at time step t
pre_types = []
for e_id, example in enumerate(current_sketch_objects):
for node in example:
node.parent=None
node.children=[]
if(t<len(example)):
action_tm1 = action_seq[e_id][-1]
# print('example:%d at time:%d'%(e_id,t))
# print('now pred:',action_seq[e_id])
# print('now obj:',example)
new_current_sketch_obj = adjust_sketch(action_seq[e_id], example)
# print('adjusted obj:',new_current_sketch_obj)
current_sketch_objects[e_id] = new_current_sketch_obj
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
# this yields the embedding of the previous action
else:
# truncate: this is a sketch, so the action sequence contains no A-> productions
print(action_tm1, 'only for sketch')
quit()
a_tm1_embed = zero_action_embed
pass
else:
# beyond the original action length, the previous embedding is set to zero
a_tm1_embed = zero_action_embed
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
inputs = [a_tm1_embeds]
for e_id, example in enumerate(current_sketch_objects): # example has changed here; unclear how much this matters
if(t<len(example)):
action_tm = action_seq[e_id][-1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
else:
pre_type = zero_type_embed
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
# pre_types:[batch_size,type_embed_size]
inputs.append(att_tm1)
# where does att_tm1 come from? at t=0 this branch is never reached, and it is assigned below
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
# concatenated into the decoder input
src_mask = batch.src_token_mask
(h_t, cell_t), att_t, aw = self.step(x, h_tm1, src_encodings,
utterance_encodings_sketch_linear, self.sketch_decoder_lstm,
self.sketch_att_vec_linear,
src_token_mask=src_mask, return_att_weight=True)
sketch_attention_history.append(att_t)
# get the Root possibility
apply_rule_prob = F.softmax(self.production_readout(att_t), dim=-1)
# apply_rule_prob:tensor with size [batch_size,num_rule]
# pred_t_idxs = torch.argmax(apply_rule_prob, dim=-1).tolist()
# for i in range(len(pred_t_idxs)):
# action_seq[i].append(self.grammar.template[pred_t_idxs[i]])
self.eval()
for e_id,example in enumerate(examples):
_,sketch=self.parse(example)
if(t<len(sketch)):
action_seq[e_id] = sketch[0:t+1]
# record the prediction at time step t
self.train()
for e_id, example in enumerate(current_sketch_objects):
if t < len(example):
action_t = example[t]
act_prob_t_i = apply_rule_prob[e_id, self.grammar.prod2id[action_t.production]]
action_probs[e_id].append(act_prob_t_i)
h_tm1 = (h_t, cell_t)
att_tm1 = att_t
# action_probs:[batch_size,len(sketch)]
sketch_prob_var = torch.stack(
[torch.stack(action_probs_i, dim=0).log().sum() for action_probs_i in action_probs], dim=0)
# sketch_prob_var:[batch_size,1]
table_embedding = self.gen_x_batch(batch.table_sents)
# confusingly named: these are actually columns, not tables
src_embedding = self.gen_x_batch(batch.src_sents)
schema_embedding = self.gen_x_batch(batch.table_names)
# get emb differ
embedding_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=table_embedding,
table_unk_mask=batch.table_unk_mask)
schema_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=schema_embedding,
table_unk_mask=batch.schema_token_mask)
tab_ctx = (src_encodings.unsqueeze(1) * embedding_differ.unsqueeze(3)).sum(2)
schema_ctx = (src_encodings.unsqueeze(1) * schema_differ.unsqueeze(3)).sum(2)
table_embedding = table_embedding + tab_ctx
schema_embedding = schema_embedding + schema_ctx
col_type = self.input_type(batch.col_hot_type)
col_type_var = self.col_type(col_type)
table_embedding = table_embedding + col_type_var
batch_table_dict = batch.col_table_dict
table_enable = np.zeros(shape=(len(examples)))
action_probs = [[] for _ in examples]
h_tm1 = dec_init_vec
for t in range(batch.max_action_num):
if t == 0:
# x = self.lf_begin_vec.unsqueeze(0).repeat(len(batch), 1)
x = Variable(self.new_tensor(len(batch), self.lf_decoder_lstm.input_size).zero_(),
requires_grad=False)
else:
a_tm1_embeds = []
pre_types = []
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_tm1 = example.tgt_actions[t - 1]
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order,
]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
else:
if isinstance(action_tm1, define_rule.C):
a_tm1_embed = self.column_rnn_input(table_embedding[e_id, action_tm1.id_c])
elif isinstance(action_tm1, define_rule.T):
a_tm1_embed = self.table_rnn_input(schema_embedding[e_id, action_tm1.id_c])
# bug note: this should be self.table_rnn_input, otherwise that layer is never used
elif isinstance(action_tm1, define_rule.A):
a_tm1_embed = self.production_embed.weight[
self.grammar.prod2id[action_tm1.production]]
else:
print(action_tm1, 'not implemented')
quit()
a_tm1_embed = zero_action_embed
pass
else:
a_tm1_embed = zero_action_embed
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
inputs = [a_tm1_embeds]
# tgt t-1 action type
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_tm = example.tgt_actions[t - 1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
else:
pre_type = zero_type_embed
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
inputs.append(att_tm1)
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
src_mask = batch.src_token_mask
(h_t, cell_t), att_t, aw = self.step(x, h_tm1, src_encodings,
utterance_encodings_lf_linear, self.lf_decoder_lstm,
self.lf_att_vec_linear,
src_token_mask=src_mask, return_att_weight=True)
apply_rule_prob = F.softmax(self.production_readout(att_t), dim=-1)
table_appear_mask_val = torch.from_numpy(table_appear_mask)
if self.args.cuda:  # note: self.cuda on an nn.Module is a bound method and is always truthy
table_appear_mask_val = table_appear_mask_val.cuda()
if self.use_column_pointer:
gate = F.sigmoid(self.prob_att(att_t))
weights = self.column_pointer_net(src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * table_appear_mask_val * gate + self.column_pointer_net(
src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * (1 - table_appear_mask_val) * (1 - gate)
else:
weights = self.column_pointer_net(src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=batch.table_token_mask)
weights.data.masked_fill_(batch.table_token_mask.bool(), -float('inf'))
column_attention_weights = F.softmax(weights, dim=-1)
table_weights = self.table_pointer_net(src_encodings=schema_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None)
schema_token_mask = batch.schema_token_mask.expand_as(table_weights)
table_weights.data.masked_fill_(schema_token_mask.bool(), -float('inf'))
table_dict = [batch_table_dict[x_id][int(x)] for x_id, x in enumerate(table_enable.tolist())]
table_mask = batch.table_dict_mask(table_dict)
table_weights.data.masked_fill_(table_mask.bool(), -float('inf'))
table_weights = F.softmax(table_weights, dim=-1)
# now get the loss
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_t = example.tgt_actions[t]
if isinstance(action_t, define_rule.C):
table_appear_mask[e_id, action_t.id_c] = 1
table_enable[e_id] = action_t.id_c
act_prob_t_i = column_attention_weights[e_id, action_t.id_c]
action_probs[e_id].append(act_prob_t_i)
elif isinstance(action_t, define_rule.T):
act_prob_t_i = table_weights[e_id, action_t.id_c]
action_probs[e_id].append(act_prob_t_i)
elif isinstance(action_t, define_rule.A):
act_prob_t_i = apply_rule_prob[e_id, self.grammar.prod2id[action_t.production]]
action_probs[e_id].append(act_prob_t_i)
else:
pass
h_tm1 = (h_t, cell_t)
att_tm1 = att_t
lf_prob_var = torch.stack(
[torch.stack(action_probs_i, dim=0).log().sum() for action_probs_i in action_probs], dim=0)
return [sketch_prob_var, lf_prob_var]
else:
for t in range(batch.max_sketch_num):
if t == 0:
x = Variable(self.new_tensor(len(batch), self.sketch_decoder_lstm.input_size).zero_(),
requires_grad=False)
# typical LSTM/RNN input layout: time step is dim 0, batch is dim 1, representation is dim 2
# t == 0 is handled separately because there is no previous time step
else:
a_tm1_embeds = []
# representation of each example's last action at time step t
pre_types = []
for e_id, example in enumerate(examples):
if t < len(example.sketch):
# get the last action
# This is the action embedding
action_tm1 = example.sketch[t - 1]
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
# this yields the embedding of the previous action
else:
# truncate: this is a sketch, so the action sequence contains no A-> productions
print(action_tm1, 'only for sketch')
quit()
a_tm1_embed = zero_action_embed
pass
else:
# beyond the original action length, the previous embedding is set to zero
a_tm1_embed = zero_action_embed
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
# stacked into a 2-D tensor
# a_tm1_embeds:[batch_size,action_embed_size]
inputs = [a_tm1_embeds]
for e_id, example in enumerate(examples):
if t < len(example.sketch):
action_tm = example.sketch[t - 1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
# unlike grammar-tree generation, this is the previous action's type, not the current node's type
else:
pre_type = zero_type_embed
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
# pre_types:[batch_size,type_embed_size]
inputs.append(att_tm1)
# where does att_tm1 come from? at t=0 this branch is never reached, and it is assigned below
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
# concatenated into the decoder input
src_mask = batch.src_token_mask
(h_t, cell_t), att_t, aw = self.step(x, h_tm1, src_encodings,
utterance_encodings_sketch_linear, self.sketch_decoder_lstm,
self.sketch_att_vec_linear,
src_token_mask=src_mask, return_att_weight=True)
sketch_attention_history.append(att_t)
# get the Root possibility
apply_rule_prob = F.softmax(self.production_readout(att_t), dim=-1)
# probability of each grammar production
# given the first t-1 actions, the distribution over the t-th action
for e_id, example in enumerate(examples):
if t < len(example.sketch):
action_t = example.sketch[t]
act_prob_t_i = apply_rule_prob[e_id, self.grammar.prod2id[action_t.production]]
action_probs[e_id].append(act_prob_t_i)
# this part needs changing
h_tm1 = (h_t, cell_t)
att_tm1 = att_t
# action_probs:[batch_size,len(sketch)]
sketch_prob_var = torch.stack(
[torch.stack(action_probs_i, dim=0).log().sum() for action_probs_i in action_probs], dim=0)
# sketch_prob_var:[batch_size,1]
# sum of the log action probabilities for each sentence in the batch
# what this computes: log P(y|x) for the given action sequence
table_embedding = self.gen_x_batch(batch.table_sents)
# confusingly named: these are actually columns, not tables
src_embedding = self.gen_x_batch(batch.src_sents)
schema_embedding = self.gen_x_batch(batch.table_names)
# get emb differ
embedding_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=table_embedding,
table_unk_mask=batch.table_unk_mask)
schema_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=schema_embedding,
table_unk_mask=batch.schema_token_mask)
tab_ctx = (src_encodings.unsqueeze(1) * embedding_differ.unsqueeze(3)).sum(2)
schema_ctx = (src_encodings.unsqueeze(1) * schema_differ.unsqueeze(3)).sum(2)
table_embedding = table_embedding + tab_ctx
schema_embedding = schema_embedding + schema_ctx
col_type = self.input_type(batch.col_hot_type)
col_type_var = self.col_type(col_type)
table_embedding = table_embedding + col_type_var
batch_table_dict = batch.col_table_dict
table_enable = np.zeros(shape=(len(examples)))
action_probs = [[] for _ in examples]
h_tm1 = dec_init_vec
for t in range(batch.max_action_num):
if t == 0:
# x = self.lf_begin_vec.unsqueeze(0).repeat(len(batch), 1)
x = Variable(self.new_tensor(len(batch), self.lf_decoder_lstm.input_size).zero_(), requires_grad=False)
else:
a_tm1_embeds = []
pre_types = []
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_tm1 = example.tgt_actions[t - 1]
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order,
]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
else:
if isinstance(action_tm1, define_rule.C):
a_tm1_embed = self.column_rnn_input(table_embedding[e_id, action_tm1.id_c])
elif isinstance(action_tm1, define_rule.T):
a_tm1_embed = self.table_rnn_input(schema_embedding[e_id, action_tm1.id_c])
# bug note: this should be self.table_rnn_input, otherwise that layer is never used
elif isinstance(action_tm1, define_rule.A):
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
else:
print(action_tm1, 'not implemented')
quit()
a_tm1_embed = zero_action_embed
pass
else:
a_tm1_embed = zero_action_embed
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
inputs = [a_tm1_embeds]
# tgt t-1 action type
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_tm = example.tgt_actions[t - 1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
else:
pre_type = zero_type_embed
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
inputs.append(att_tm1)
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
src_mask = batch.src_token_mask
(h_t, cell_t), att_t, aw = self.step(x, h_tm1, src_encodings,
utterance_encodings_lf_linear, self.lf_decoder_lstm,
self.lf_att_vec_linear,
src_token_mask=src_mask, return_att_weight=True)
apply_rule_prob = F.softmax(self.production_readout(att_t), dim=-1)
table_appear_mask_val = torch.from_numpy(table_appear_mask)
if self.args.cuda:  # note: self.cuda on an nn.Module is a bound method and is always truthy
table_appear_mask_val = table_appear_mask_val.cuda()
if self.use_column_pointer:
gate = F.sigmoid(self.prob_att(att_t))
weights = self.column_pointer_net(src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * table_appear_mask_val * gate + self.column_pointer_net(
src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * (1 - table_appear_mask_val) * (1 - gate)
else:
weights = self.column_pointer_net(src_encodings=table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=batch.table_token_mask)
weights.data.masked_fill_(batch.table_token_mask.bool(), -float('inf'))
column_attention_weights = F.softmax(weights, dim=-1)
table_weights = self.table_pointer_net(src_encodings=schema_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None)
schema_token_mask = batch.schema_token_mask.expand_as(table_weights)
table_weights.data.masked_fill_(schema_token_mask.bool(), -float('inf'))
table_dict = [batch_table_dict[x_id][int(x)] for x_id, x in enumerate(table_enable.tolist())]
table_mask = batch.table_dict_mask(table_dict)
table_weights.data.masked_fill_(table_mask.bool(), -float('inf'))
table_weights = F.softmax(table_weights, dim=-1)
# now get the loss
for e_id, example in enumerate(examples):
if t < len(example.tgt_actions):
action_t = example.tgt_actions[t]
if isinstance(action_t, define_rule.C):
table_appear_mask[e_id, action_t.id_c] = 1
table_enable[e_id] = action_t.id_c
act_prob_t_i = column_attention_weights[e_id, action_t.id_c]
action_probs[e_id].append(act_prob_t_i)
elif isinstance(action_t, define_rule.T):
act_prob_t_i = table_weights[e_id, action_t.id_c]
action_probs[e_id].append(act_prob_t_i)
elif isinstance(action_t, define_rule.A):
act_prob_t_i = apply_rule_prob[e_id, self.grammar.prod2id[action_t.production]]
action_probs[e_id].append(act_prob_t_i)
else:
pass
h_tm1 = (h_t, cell_t)
att_tm1 = att_t
lf_prob_var = torch.stack(
[torch.stack(action_probs_i, dim=0).log().sum() for action_probs_i in action_probs], dim=0)
return [sketch_prob_var, lf_prob_var]
def parse(self, examples, beam_size=5):
"""
one example at a time
:param examples:
:param beam_size:
:return:
"""
# print('example:',examples.tgt_actions)
batch = Batch([examples], self.grammar, cuda=self.args.cuda)
src_encodings, (last_state, last_cell) = self.encode(batch.src_sents, batch.src_sents_len, None)
src_encodings = self.dropout(src_encodings)
utterance_encodings_sketch_linear = self.att_sketch_linear(src_encodings)
utterance_encodings_lf_linear = self.att_lf_linear(src_encodings)
dec_init_vec = self.init_decoder_state(last_cell)
h_tm1 = dec_init_vec
t = 0
beams = [Beams(is_sketch=True)]
completed_beams = []
while len(completed_beams) < beam_size and t < self.args.decode_max_time_step:
hyp_num = len(beams)
exp_src_encodings = src_encodings.expand(hyp_num, src_encodings.size(1),
src_encodings.size(2))
exp_src_encodings_sketch_linear = utterance_encodings_sketch_linear.expand(hyp_num,
utterance_encodings_sketch_linear.size(
1),
utterance_encodings_sketch_linear.size(
2))
if t == 0:
with torch.no_grad():
x = Variable(self.new_tensor(1, self.sketch_decoder_lstm.input_size).zero_())
else:
a_tm1_embeds = []
pre_types = []
for e_id, hyp in enumerate(beams):
action_tm1 = hyp.actions[-1]
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
else:
raise ValueError('unknown action %s' % action_tm1)
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
inputs = [a_tm1_embeds]
for e_id, hyp in enumerate(beams):
action_tm = hyp.actions[-1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
inputs.append(att_tm1)
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
(h_t, cell_t), att_t = self.step(x, h_tm1, exp_src_encodings,
exp_src_encodings_sketch_linear, self.sketch_decoder_lstm,
self.sketch_att_vec_linear,
src_token_mask=None)
apply_rule_log_prob = F.log_softmax(self.production_readout(att_t), dim=-1)
new_hyp_meta = []
for hyp_id, hyp in enumerate(beams):
action_class = hyp.get_availableClass()
if action_class in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order]:
possible_productions = self.grammar.get_production(action_class)
for possible_production in possible_productions:
prod_id = self.grammar.prod2id[possible_production]
prod_score = apply_rule_log_prob[hyp_id, prod_id]
new_hyp_score = hyp.score + prod_score.data.cpu()
meta_entry = {'action_type': action_class, 'prod_id': prod_id,
'score': prod_score, 'new_hyp_score': new_hyp_score,
'prev_hyp_id': hyp_id}
new_hyp_meta.append(meta_entry)
else:
raise RuntimeError('No right action class')
if not new_hyp_meta: break
new_hyp_scores = torch.stack([x['new_hyp_score'] for x in new_hyp_meta], dim=0)
top_new_hyp_scores, meta_ids = torch.topk(new_hyp_scores,
k=min(new_hyp_scores.size(0),
beam_size - len(completed_beams)))
live_hyp_ids = []
new_beams = []
for new_hyp_score, meta_id in zip(top_new_hyp_scores.data.cpu(), meta_ids.data.cpu()):
action_info = ActionInfo()
hyp_meta_entry = new_hyp_meta[meta_id]
prev_hyp_id = hyp_meta_entry['prev_hyp_id']
prev_hyp = beams[prev_hyp_id]
action_type_str = hyp_meta_entry['action_type']
prod_id = hyp_meta_entry['prod_id']
if prod_id < len(self.grammar.id2prod):
production = self.grammar.id2prod[prod_id]
action = action_type_str(list(action_type_str._init_grammar()).index(production))
else:
raise NotImplementedError
action_info.action = action
action_info.t = t
action_info.score = hyp_meta_entry['score']
new_hyp = prev_hyp.clone_and_apply_action_info(action_info)
new_hyp.score = new_hyp_score
new_hyp.inputs.extend(prev_hyp.inputs)
if new_hyp.is_valid is False:
continue
if new_hyp.completed:
completed_beams.append(new_hyp)
else:
new_beams.append(new_hyp)
live_hyp_ids.append(prev_hyp_id)
if live_hyp_ids:
h_tm1 = (h_t[live_hyp_ids], cell_t[live_hyp_ids])
att_tm1 = att_t[live_hyp_ids]
beams = new_beams
t += 1
else:
break
# now get the sketch result
completed_beams.sort(key=lambda hyp: -hyp.score)
if len(completed_beams) == 0:
return [[], []]
sketch_actions = completed_beams[0].actions
# sketch_actions = examples.sketch
padding_sketch = self.padding_sketch(sketch_actions)
table_embedding = self.gen_x_batch(batch.table_sents)
src_embedding = self.gen_x_batch(batch.src_sents)
schema_embedding = self.gen_x_batch(batch.table_names)
# get emb differ
embedding_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=table_embedding,
table_unk_mask=batch.table_unk_mask)
schema_differ = self.embedding_cosine(src_embedding=src_embedding, table_embedding=schema_embedding,
table_unk_mask=batch.schema_token_mask)
tab_ctx = (src_encodings.unsqueeze(1) * embedding_differ.unsqueeze(3)).sum(2)
schema_ctx = (src_encodings.unsqueeze(1) * schema_differ.unsqueeze(3)).sum(2)
table_embedding = table_embedding + tab_ctx
schema_embedding = schema_embedding + schema_ctx
col_type = self.input_type(batch.col_hot_type)
col_type_var = self.col_type(col_type)
table_embedding = table_embedding + col_type_var
batch_table_dict = batch.col_table_dict
h_tm1 = dec_init_vec
t = 0
beams = [Beams(is_sketch=False)]
completed_beams = []
while len(completed_beams) < beam_size and t < self.args.decode_max_time_step:
hyp_num = len(beams)
# expand value
exp_src_encodings = src_encodings.expand(hyp_num, src_encodings.size(1),
src_encodings.size(2))
exp_utterance_encodings_lf_linear = utterance_encodings_lf_linear.expand(hyp_num,
utterance_encodings_lf_linear.size(
1),
utterance_encodings_lf_linear.size(
2))
exp_table_embedding = table_embedding.expand(hyp_num, table_embedding.size(1),
table_embedding.size(2))
exp_schema_embedding = schema_embedding.expand(hyp_num, schema_embedding.size(1),
schema_embedding.size(2))
table_appear_mask = batch.table_appear_mask
table_appear_mask = np.zeros((hyp_num, table_appear_mask.shape[1]), dtype=np.float32)
table_enable = np.zeros(shape=(hyp_num))
for e_id, hyp in enumerate(beams):
for act in hyp.actions:
if type(act) == define_rule.C:
table_appear_mask[e_id][act.id_c] = 1
table_enable[e_id] = act.id_c
if t == 0:
with torch.no_grad():
x = Variable(self.new_tensor(1, self.lf_decoder_lstm.input_size).zero_())
else:
a_tm1_embeds = []
pre_types = []
for e_id, hyp in enumerate(beams):
action_tm1 = hyp.actions[-1]
if type(action_tm1) in [define_rule.Root1,
define_rule.Root,
define_rule.Sel,
define_rule.Filter,
define_rule.Sup,
define_rule.N,
define_rule.Order]:
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
hyp.sketch_step += 1
elif isinstance(action_tm1, define_rule.C):
a_tm1_embed = self.column_rnn_input(table_embedding[0, action_tm1.id_c])
elif isinstance(action_tm1, define_rule.T):
a_tm1_embed = self.table_rnn_input(schema_embedding[0, action_tm1.id_c])  # was column_rnn_input; table embeddings go through table_rnn_input, matching forward()
elif isinstance(action_tm1, define_rule.A):
a_tm1_embed = self.production_embed.weight[self.grammar.prod2id[action_tm1.production]]
else:
raise ValueError('unknown action %s' % action_tm1)
a_tm1_embeds.append(a_tm1_embed)
a_tm1_embeds = torch.stack(a_tm1_embeds)
inputs = [a_tm1_embeds]
for e_id, hyp in enumerate(beams):
action_tm = hyp.actions[-1]
pre_type = self.type_embed.weight[self.grammar.type2id[type(action_tm)]]
pre_types.append(pre_type)
pre_types = torch.stack(pre_types)
inputs.append(att_tm1)
inputs.append(pre_types)
x = torch.cat(inputs, dim=-1)
(h_t, cell_t), att_t = self.step(x, h_tm1, exp_src_encodings,
exp_utterance_encodings_lf_linear, self.lf_decoder_lstm,
self.lf_att_vec_linear,
src_token_mask=None)
apply_rule_log_prob = F.log_softmax(self.production_readout(att_t), dim=-1)
table_appear_mask_val = torch.from_numpy(table_appear_mask)
if self.args.cuda: table_appear_mask_val = table_appear_mask_val.cuda()
if self.use_column_pointer:
gate = F.sigmoid(self.prob_att(att_t))
weights = self.column_pointer_net(src_encodings=exp_table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * table_appear_mask_val * gate + self.column_pointer_net(
src_encodings=exp_table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None) * (1 - table_appear_mask_val) * (1 - gate)
# weights = weights + self.col_attention_out(exp_embedding_differ).squeeze()
else:
weights = self.column_pointer_net(src_encodings=exp_table_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=batch.table_token_mask)
# weights.data.masked_fill_(exp_col_pred_mask, -float('inf'))
column_selection_log_prob = F.log_softmax(weights, dim=-1)
table_weights = self.table_pointer_net(src_encodings=exp_schema_embedding, query_vec=att_t.unsqueeze(0),
src_token_mask=None)
# table_weights = self.table_pointer_net(src_encodings=exp_schema_embedding, query_vec=att_t.unsqueeze(0), src_token_mask=None)
schema_token_mask = batch.schema_token_mask.expand_as(table_weights)
table_weights.data.masked_fill_(schema_token_mask.bool(), -float('inf'))
table_dict = [batch_table_dict[0][int(x)] for x_id, x in enumerate(table_enable.tolist())]
table_mask = batch.table_dict_mask(table_dict)
table_weights.data.masked_fill_(table_mask.bool(), -float('inf'))
table_weights = F.log_softmax(table_weights, dim=-1)
new_hyp_meta = []
for hyp_id, hyp in enumerate(beams):
# TODO: should change this
if type(padding_sketch[t]) == define_rule.A:
possible_productions = self.grammar.get_production(define_rule.A)
for possible_production in possible_productions:
prod_id = self.grammar.prod2id[possible_production]
prod_score = apply_rule_log_prob[hyp_id, prod_id]
new_hyp_score = hyp.score + prod_score.data.cpu()
meta_entry = {'action_type': define_rule.A, 'prod_id': prod_id,
'score': prod_score, 'new_hyp_score': new_hyp_score,
'prev_hyp_id': hyp_id}
new_hyp_meta.append(meta_entry)
elif type(padding_sketch[t]) == define_rule.C:
for col_id, _ in enumerate(batch.table_sents[0]):
col_sel_score = column_selection_log_prob[hyp_id, col_id]
new_hyp_score = hyp.score + col_sel_score.data.cpu()
meta_entry = {'action_type': define_rule.C, 'col_id': col_id,
'score': col_sel_score, 'new_hyp_score': new_hyp_score,
'prev_hyp_id': hyp_id}
new_hyp_meta.append(meta_entry)
elif type(padding_sketch[t]) == define_rule.T:
for t_id, _ in enumerate(batch.table_names[0]):
t_sel_score = table_weights[hyp_id, t_id]
new_hyp_score = hyp.score + t_sel_score.data.cpu()
meta_entry = {'action_type': define_rule.T, 't_id': t_id,
'score': t_sel_score, 'new_hyp_score': new_hyp_score,
'prev_hyp_id': hyp_id}
new_hyp_meta.append(meta_entry)
else:
prod_id = self.grammar.prod2id[padding_sketch[t].production]
new_hyp_score = hyp.score + torch.tensor(0.0)
meta_entry = {'action_type': type(padding_sketch[t]), 'prod_id': prod_id,
'score': torch.tensor(0.0), 'new_hyp_score': new_hyp_score,
'prev_hyp_id': hyp_id}
new_hyp_meta.append(meta_entry)
if not new_hyp_meta: break
new_hyp_scores = torch.stack([x['new_hyp_score'] for x in new_hyp_meta], dim=0)
top_new_hyp_scores, meta_ids = torch.topk(new_hyp_scores,
k=min(new_hyp_scores.size(0),
beam_size - len(completed_beams)))
live_hyp_ids = []
new_beams = []
for new_hyp_score, meta_id in zip(top_new_hyp_scores.data.cpu(), meta_ids.data.cpu()):
action_info = ActionInfo()
hyp_meta_entry = new_hyp_meta[meta_id]
prev_hyp_id = hyp_meta_entry['prev_hyp_id']
prev_hyp = beams[prev_hyp_id]
action_type_str = hyp_meta_entry['action_type']
if 'prod_id' in hyp_meta_entry:
prod_id = hyp_meta_entry['prod_id']
if action_type_str == define_rule.C:
col_id = hyp_meta_entry['col_id']
action = define_rule.C(col_id)
elif action_type_str == define_rule.T:
t_id = hyp_meta_entry['t_id']
action = define_rule.T(t_id)
elif prod_id < len(self.grammar.id2prod):
production = self.grammar.id2prod[prod_id]
action = action_type_str(list(action_type_str._init_grammar()).index(production))
else:
raise NotImplementedError
action_info.action = action
action_info.t = t
action_info.score = hyp_meta_entry['score']
new_hyp = prev_hyp.clone_and_apply_action_info(action_info)
new_hyp.score = new_hyp_score
new_hyp.inputs.extend(prev_hyp.inputs)
if new_hyp.is_valid is False:
continue
if new_hyp.completed:
completed_beams.append(new_hyp)
else:
new_beams.append(new_hyp)
live_hyp_ids.append(prev_hyp_id)
if live_hyp_ids:
h_tm1 = (h_t[live_hyp_ids], cell_t[live_hyp_ids])
att_tm1 = att_t[live_hyp_ids]
beams = new_beams
t += 1
else:
break
completed_beams.sort(key=lambda hyp: -hyp.score)
# print('completed_beams',completed_beams)
# print('sketch_actions',sketch_actions)
return [completed_beams, sketch_actions]
def step(self, x, h_tm1, src_encodings, src_encodings_att_linear, decoder, attention_func, src_token_mask=None,
return_att_weight=False):
# h_t: (batch_size, hidden_size)
h_t, cell_t = decoder(x, h_tm1)
ctx_t, alpha_t = nn_utils.dot_prod_attention(h_t,
src_encodings, src_encodings_att_linear,
mask=src_token_mask)
att_t = F.tanh(attention_func(torch.cat([h_t, ctx_t], 1)))
# no longer a plain attention readout: several extra operations are applied here
att_t = self.dropout(att_t)
if return_att_weight:
return (h_t, cell_t), att_t, alpha_t
else:
return (h_t, cell_t), att_t
def init_decoder_state(self, enc_last_cell):
h_0 = self.decoder_cell_init(enc_last_cell)
h_0 = F.tanh(h_0)
return h_0, Variable(self.new_tensor(h_0.size()).zero_())
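# A minimal sketch (an assumption, not the repo's nn_utils implementation) of the
# dot-product attention that step() relies on: scores come from h_t against the
# pre-projected encodings, masked positions are filled with -inf, and the context
# vector is the probability-weighted sum of the raw encodings.
import torch
import torch.nn.functional as F

def dot_prod_attention_sketch(h_t, src_encodings, src_encodings_att_linear, mask=None):
    # h_t: (batch, hidden); src_encodings and src_encodings_att_linear: (batch, src_len, hidden)
    att_weight = torch.bmm(src_encodings_att_linear, h_t.unsqueeze(2)).squeeze(2)
    if mask is not None:
        # masked positions cannot receive attention mass
        att_weight = att_weight.masked_fill(mask.bool(), -float('inf'))
    att_prob = F.softmax(att_weight, dim=-1)
    ctx = torch.bmm(att_prob.unsqueeze(1), src_encodings).squeeze(1)
    return ctx, att_prob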
| 49.485688
| 140
| 0.511877
| 5,802
| 53,593
| 4.359531
| 0.067563
| 0.030837
| 0.012256
| 0.005693
| 0.814857
| 0.775757
| 0.753578
| 0.742785
| 0.729857
| 0.714517
| 0
| 0.010837
| 0.412871
| 53,593
| 1,082
| 141
| 49.531423
| 0.793015
| 0.057433
| 0
| 0.755937
| 0
| 0
| 0.01049
| 0
| 0
| 0
| 0
| 0.000924
| 0
| 1
| 0.007916
| false
| 0.007916
| 0.01847
| 0
| 0.038259
| 0.006596
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc9f435695eab4cc8089e1eaa89a862c92bdf49f
| 9,692
|
py
|
Python
|
sdk/python-sdk/test/protocols/v1_0/test_Relationship.py
|
yankeexe/verity-sdk
|
8b7afed77048c1698dbc7349348c68870f5e4eb8
|
[
"Apache-2.0"
] | null | null | null |
sdk/python-sdk/test/protocols/v1_0/test_Relationship.py
|
yankeexe/verity-sdk
|
8b7afed77048c1698dbc7349348c68870f5e4eb8
|
[
"Apache-2.0"
] | null | null | null |
sdk/python-sdk/test/protocols/v1_0/test_Relationship.py
|
yankeexe/verity-sdk
|
8b7afed77048c1698dbc7349348c68870f5e4eb8
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from test.test_utils import get_test_config
from verity_sdk.protocols.v1_0.Relationship import Relationship, GoalsList
from verity_sdk.utils import EVERNYM_MSG_QUALIFIER
from verity_sdk.utils.Context import Context
for_relationship = 'RxRJCMe5XNqc9e9J1YPwhL'
thread_id = '7a80285e-896c-45f6-b386-39ed7c49230c'
label = 'test_label'
logo_url = 'logo_url'
phone_number = '+18011234567'
short_invite = False
def test_init():
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id,
label=label,
logo_url=logo_url,
)
assert relationship.label == label
assert relationship.for_relationship == for_relationship
assert relationship.thread_id == thread_id
assert relationship.logo_url == logo_url
relationship = Relationship(
label=None
)
assert relationship.label is None
assert relationship.logo_url is None
@pytest.mark.asyncio
async def test_create():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id,
label=label
)
msg = relationship.create_msg(context)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.CREATE
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['label'] == label
assert 'logoUrl' not in msg
assert 'phoneNumber' not in msg
@pytest.mark.asyncio
async def test_create_with_logo_url():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id,
label=label,
logo_url=logo_url
)
msg = relationship.create_msg(context)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.CREATE
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['label'] == label
assert msg['logoUrl'] == logo_url
assert 'phoneNumber' not in msg
@pytest.mark.asyncio
async def test_create_with_phone_number():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id,
label=label,
phone_number=phone_number
)
msg = relationship.create_msg(context)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.CREATE
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['label'] == label
assert 'logoUrl' not in msg
assert msg['phoneNumber'] == phone_number
@pytest.mark.asyncio
async def test_connection_invitation():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id,
label=label
)
msg = relationship.connection_invitation_msg(context)
print(msg)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.CONNECTION_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert 'shortInvite' not in msg
@pytest.mark.asyncio
async def test_connection_invitation_with_short_invite():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.connection_invitation_msg(context, short_invite=short_invite)
print(msg)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.CONNECTION_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['shortInvite'] == short_invite
@pytest.mark.asyncio
async def test_sms_connection_invitation():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.sms_connection_invitation_msg(context)
print(msg)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.SMS_CONNECTION_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
@pytest.mark.asyncio
async def test_out_of_band_invitation():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.out_of_band_invitation_msg(context)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.OUT_OF_BAND_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['goalCode'] == GoalsList.P2P_MESSAGING.value.code
assert msg['goal'] == GoalsList.P2P_MESSAGING.value.name
assert 'shortInvite' not in msg
@pytest.mark.asyncio
async def test_out_of_band_invitation_with_goal():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.out_of_band_invitation_msg(context, goal=GoalsList.ISSUE_VC)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.OUT_OF_BAND_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['goalCode'] == GoalsList.ISSUE_VC.value.code
assert msg['goal'] == GoalsList.ISSUE_VC.value.name
assert 'shortInvite' not in msg
@pytest.mark.asyncio
async def test_out_of_band_invitation_with_short_invite():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.out_of_band_invitation_msg(context, short_invite=short_invite)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.OUT_OF_BAND_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['goalCode'] == GoalsList.P2P_MESSAGING.value.code
assert msg['goal'] == GoalsList.P2P_MESSAGING.value.name
assert msg['shortInvite'] == short_invite
@pytest.mark.asyncio
async def test_sms_out_of_band_invitation():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.sms_out_of_band_invitation_msg(context)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.SMS_OUT_OF_BAND_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['goalCode'] == GoalsList.P2P_MESSAGING.value.code
assert msg['goal'] == GoalsList.P2P_MESSAGING.value.name
@pytest.mark.asyncio
async def test_sms_out_of_band_invitation_with_goal():
context = await Context.create_with_config(await get_test_config())
relationship = Relationship(
for_relationship=for_relationship,
thread_id=thread_id
)
msg = relationship.sms_out_of_band_invitation_msg(context, goal=GoalsList.ISSUE_VC)
assert msg['@type'] == '{}/{}/{}/{}'.format(
EVERNYM_MSG_QUALIFIER,
Relationship.MSG_FAMILY,
Relationship.MSG_FAMILY_VERSION,
Relationship.SMS_OUT_OF_BAND_INVITATION
)
assert msg['@id'] is not None
assert msg['~thread'] is not None
assert msg['~thread']['thid'] is not None
assert msg['~for_relationship'] == for_relationship
assert msg['goalCode'] == GoalsList.ISSUE_VC.value.code
assert msg['goal'] == GoalsList.ISSUE_VC.value.name
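# A small, self-contained sketch of the '@type' convention every assertion above
# checks: qualifier, message family, version and message name joined by '/'. The
# values here are placeholders, not taken from the SDK.
qualifier = 'did:example:qualifier'  # placeholder, not the real EVERNYM_MSG_QUALIFIER
family, version, name = 'relationship', '1.0', 'create'
msg_type = '{}/{}/{}/{}'.format(qualifier, family, version, name)
assert msg_type == 'did:example:qualifier/relationship/1.0/create'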
| 31.570033
| 87
| 0.691498
| 1,163
| 9,692
| 5.492691
| 0.062769
| 0.097214
| 0.143707
| 0.077489
| 0.910927
| 0.896055
| 0.89449
| 0.878992
| 0.872887
| 0.872887
| 0
| 0.005926
| 0.19903
| 9,692
| 306
| 88
| 31.673203
| 0.816952
| 0
| 0
| 0.733333
| 0
| 0
| 0.08409
| 0.005984
| 0
| 0
| 0
| 0
| 0.321569
| 1
| 0.003922
| false
| 0
| 0.019608
| 0
| 0.023529
| 0.011765
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bceba2dc6e55249c94726cbe225add097b6980da
| 47,575
|
py
|
Python
|
src/genie/libs/parser/iosxr/show_routing.py
|
Tristou27/genieparser
|
7191e89b29d0711c604c2f4cce2ff6a093d3e4d6
|
[
"Apache-2.0"
] | 1
|
2021-03-24T14:11:44.000Z
|
2021-03-24T14:11:44.000Z
|
src/genie/libs/parser/iosxr/show_routing.py
|
Tristou27/genieparser
|
7191e89b29d0711c604c2f4cce2ff6a093d3e4d6
|
[
"Apache-2.0"
] | 4
|
2021-03-24T04:25:38.000Z
|
2021-03-28T04:31:21.000Z
|
src/genie/libs/parser/iosxr/show_routing.py
|
Tristou27/genieparser
|
7191e89b29d0711c604c2f4cce2ff6a093d3e4d6
|
[
"Apache-2.0"
] | 1
|
2021-04-05T22:05:15.000Z
|
2021-04-05T22:05:15.000Z
|
'''
show_route.py
'''
import re
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, \
Any, \
Optional
# ====================================================
# schema for show route ipv4
# ====================================================
class ShowRouteIpv4Schema(MetaParser):
"""Schema for show route ipv4"""
schema = {
'vrf': {
Any(): {
'address_family': {
Any(): {
Optional('routes'): {
Any(): {
'route': str,
'active': bool,
Optional('ip'): str,
Optional('mask'): str,
Optional('route_preference'): int,
Optional('metric'): int,
Optional('source_protocol'): str,
Optional('source_protocol_codes'): str,
Optional('known_via'): str,
Optional('distance'): int,
Optional('type'): str,
Optional('tag'): str,
Optional('installed'): {
'date': str,
'for': str,
},
Optional('redist_advertisers'): {
Any(): {
'protoid': int,
'clientid': int,
},
},
Optional('next_hop'): {
Optional('outgoing_interface'): {
Any(): {
'outgoing_interface': str,
Optional('updated'): str,
Optional('metric'): int,
}
},
Optional('next_hop_list'): {
int: { # index
'index': int,
Optional('next_hop'): str,
Optional('outgoing_interface'): str,
Optional('updated'): str,
Optional('metric'): int,
Optional('from'): str,
Optional('table'): str,
Optional('address_family'): str,
Optional('table_id'): str,
Optional('nexthop_in_vrf'): str,
}
}
}
}
},
},
},
Optional('last_resort'): {
Optional('gateway'): str,
Optional('to_network'): str,
},
},
}
}
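# Hedged example (constructed, not from real device output) of a minimal dict
# that satisfies the schema above -- 'route' and 'active' are the only required
# keys per route entry:
# {'vrf': {'default': {'address_family': {'ipv4': {'routes': {
#     '10.1.0.0/8': {'route': '10.1.0.0/8', 'active': True,
#                    'route_preference': 120, 'metric': 1,
#                    'source_protocol': 'rip', 'source_protocol_codes': 'R'}}}}}}}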
# ====================================================
# parser for show route ipv4
# ====================================================
class ShowRouteIpv4(ShowRouteIpv4Schema):
cli_command = [
'show route ipv4',
'show route vrf {vrf} ipv4',
'show route ipv4 {protocol}',
'show route vrf {vrf} ipv4 {protocol}',
'show route ipv4 {route}',
'show route vrf {vrf} ipv4 {route}'
]
"""
Codes: C - connected, S - static, R - RIP, B - BGP, (>) - Diversion path
D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area
N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2
E1 - OSPF external type 1, E2 - OSPF external type 2, E - EGP
i - ISIS, L1 - IS-IS level-1, L2 - IS-IS level-2
ia - IS-IS inter area, su - IS-IS summary null, * - candidate default
U - per-user static route, o - ODR, L - local, G - DAGR, l - LISP
A - access/subscriber, a - Application route
M - mobile route, r - RPL, t - Traffic Engineering, (!) - FRR Backup path
"""
source_protocol_dict = {
'ospf': ['O', 'IA', 'N1', 'N2', 'E1', 'E2'],
'odr': ['o'],
'isis': ['i', 'su', 'L1', 'L2', 'ia'],
'eigrp': ['D', 'EX'],
'static': ['S'],
'egp': ['E'],
'dagr': ['G'],
'rpl': ['r'],
'mobile router': ['M'],
'lisp': ['I', 'l'],
'nhrp': ['H'],
'local': ['L'],
'connected': ['C'],
'bgp': ['B'],
'rip': ['R'],
'per-user static route': ['U'],
'access/subscriber': ['A'],
'traffic engineering': ['t'],
}
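# Illustrative note (not in the original source): this table is consulted in
# reverse during parsing -- the leading code of a route line is split off and
# looked up to recover the protocol name, e.g. 'O' -> 'ospf', 'L2' -> 'isis',
# 'B' -> 'bgp'.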
protocol_set = {'ospf', 'odr', 'isis', 'eigrp', 'static', 'mobile',
'rip', 'lisp', 'nhrp', 'local', 'connected', 'bgp'}
def cli(self, vrf=None, route=None, protocol=None, output=None):
# Check if argument from device.parse is protocol or route
if protocol and protocol not in self.protocol_set:
route = protocol
protocol = None
if output is None:
if vrf and route:
cmd = self.cli_command[5].format(
vrf=vrf,
route=route
)
elif vrf and protocol:
cmd = self.cli_command[3].format(
vrf=vrf,
protocol=protocol
)
elif vrf:
cmd = self.cli_command[1].format(
vrf=vrf
)
elif protocol:
cmd = self.cli_command[2].format(
protocol=protocol
)
elif route:
cmd = self.cli_command[4].format(
route=route
)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# VRF: VRF501
# VRF: L:123
p1 = re.compile(r'^\s*VRF: +(?P<vrf>\S+)$')
# R 10.1.0.0/8 [120/1] via 10.12.120.1, 1w0d, GigabitEthernet0/0/0/0.120
# B 10.21.33.33/32 [200/0] via 10.166.13.13, 00:52:31
# i L2 10.154.219.32/32 [115/100030] via 10.4.1.1, 1d06h, HundredGigE0/0/1/1 (!)
# S 10.36.3.3/32 [1/0] via 10.2.3.3, 01:51:13, GigabitEthernet0/0/0/1
# B 10.19.31.31/32 [200/0] via 10.229.11.11, 00:55:14
# i L1 10.76.23.23/32 [115/11] via 10.2.3.3, 00:52:41, GigabitEthernet0/0/0/1
# S* 192.168.4.4/10 [111/10] via 172.16.84.11, 1w0d
# R 10.145.110.10/4 [10/10] via 192.168.10.12, 12:03:42, GigabitEthernet0/0/1/1.1
# B 10.100.3.160/31 [200/0] via 172.23.6.198 (nexthop in vrf default), 5d13h
p2 = re.compile(r'^(?P<code1>[\w](\*)*)\s*(?P<code2>\S+)? +(?P<network>\S+) +'
r'\[(?P<route_preference>\d+)\/(?P<metric>\d+)\] +via +'
r'(?P<next_hop>\S+)( +\(nexthop +in +vrf +\w+\))?,'
r'( +(?P<date>[\w:]+),?)?( +(?P<interface>[\w\/\.\-]+))?'
r'( +(?P<code3>[\w\*\(\>\)\!]+))?$')
# [90/15360] via 10.23.90.3, 1w0d, GigabitEthernet0/0/0/1.90
# [110/2] via 10.1.2.1, 01:50:49, GigabitEthernet0/0/0/3
p3 = re.compile(r'^\[(?P<route_preference>\d+)\/(?P<metric>\d+)\] +via +'
r'(?P<next_hop>\S+),( +(?P<date>[\w:]+))?,? +'
r'(?P<interface>[\w\/\.\-]+)$')
# L 10.16.2.2/32 is directly connected, 3w5d, Loopback0
# is directly connected, 01:51:13, GigabitEthernet0/0/0/3
# S 10.4.1.1/32 is directly connected, 01:51:13, GigabitEthernet0/0/0/0
# S 10.2.2.2/32 is directly connected, 00:06:36, Null0
p4 = re.compile(r'^((?P<code1>[\w])\s*(?P<code2>\S+)?(\s+'
r'(?P<network>\S+)\s+))?(is\s+directly\s+connected,\s+'
r'(?P<date>[\w:]+))?,?\s+(?P<interface>[\w\/\.\-]+)?$')
# Routing entry for 10.151.0.0/24, 1 known subnets
# Routing entry for 0.0.0.0/0, supernet
# Routing entry for 192.168.154.0/24
p5 = re.compile(r'^Routing +entry +for +(?P<network>(?P<ip>[\w\:\.]+)'
r'\/(?P<mask>\d+))(?:, +(?P<net>[\w\s]+))?$')
# Known via "connected", distance 0, metric 0 (connected)
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "bgp 65161", distance 20, metric 0, candidate default path
# Known via "ospf 3", distance 110, metric 32001, type extern 1
p6 = re.compile(r'^Known +via +\"(?P<known_via>[\w ]+)\", +distance +'
r'(?P<distance>\d+), +metric +(?P<metric>\d+)( \(connected\))?'
r'(, +type +(?P<type>[\S\s]+))?(, +candidate +default +path)?$')
# * directly connected, via GigabitEthernet1.120
p7 = re.compile(r'^(\* +)?directly +connected, via +(?P<interface>\S+)$')
# Route metric is 10880, traffic share count is 1
# Route metric is 0, Wt is 1
p8 = re.compile(r'^Route +metric +is +(?P<metric>\d+)(, +'
r'traffic +share +count +is +(?P<share_count>\d+))?'
r'(, +Wt +is +\d+)?$')
# eigrp/100 (protoid=5, clientid=22)
p9 = re.compile(r'^(?P<redist_advertiser>\S+) +\(protoid=(?P<protoid>\d+)'
r', +clientid=(?P<clientid>\d+)\)$')
# Installed Oct 23 22:09:38.380 for 5d21h
p10 = re.compile(r'^Installed +(?P<date>[\S\s]+) +for +(?P<for>\S+)$')
# 10.12.90.1, from 10.12.90.1, via GigabitEthernet0/0/0/0.90
# 172.23.6.96, from 172.23.15.196
# 172.25.253.121, from 172.25.253.121, BGP external
# 2001:10::1, via GigabitEthernet0/0/0/0
p11 = re.compile(r'^(?P<nexthop>\S+)(,\s+from\s+(?P<from>\S+))?(, '
r'+via\s+(?P<interface>\S+))?'
r'(, +BGP external)?$')
# R2_xrv#show route ipv4
# Routing Descriptor Blocks
# No advertising protos.
p12 = re.compile(r'^((\S+#)?(show +route))|(Routing +Descriptor +'
r'Blocks)|(No +advertising +protos\.)|(Redist +Advertisers:)')
# Tag 10584, type internal
p13 = re.compile(r'^Tag\s+(?P<tag>\d+)\,\s+type\s+(?P<type>\w+)$')
# Nexthop in Vrf: "default", Table: "default", IPv4 Unicast, Table Id: 0xe0000000
p14 = re.compile(r'^Nexthop\s+in\s+[V|v]rf\:\s+\"(?P<interface>\w+)\"\, '
r'+[T|t]able\:\s+\"(?P<table>\w+)\"\, '
r'+(?P<address_family>[\w\s]+)\,\s+[T|t]able '
r'+[I|i]d\:\s+(?P<table_id>\S+)$')
# Gateway of last resort is 172.16.0.88 to network 0.0.0.0
p15 = re.compile(r'^Gateway +of +last +resort +is '
r'+(?P<gateway>(not +set)|\S+)( +to +network '
r'+(?P<to_network>\S+))?$')
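# Standalone sanity check (illustrative, not part of the parser): shows how the
# p2 pattern above decomposes one of its own sample routing lines.
import re as _re

_p2 = _re.compile(r'^(?P<code1>[\w](\*)*)\s*(?P<code2>\S+)? +(?P<network>\S+) +'
                  r'\[(?P<route_preference>\d+)\/(?P<metric>\d+)\] +via +'
                  r'(?P<next_hop>\S+)( +\(nexthop +in +vrf +\w+\))?,'
                  r'( +(?P<date>[\w:]+),?)?( +(?P<interface>[\w\/\.\-]+))?'
                  r'( +(?P<code3>[\w\*\(\>\)\!]+))?$')
_m = _p2.match('R    10.1.0.0/8 [120/1] via 10.12.120.1, 1w0d, GigabitEthernet0/0/0/0.120')
assert _m is not None
assert _m.group('network') == '10.1.0.0/8'
assert _m.group('route_preference') == '120' and _m.group('metric') == '1'
assert _m.group('next_hop') == '10.12.120.1'
assert _m.group('interface') == 'GigabitEthernet0/0/0/0.120'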
# initial variables
ret_dict = {}
index = 0
address_family = 'ipv4'
if not vrf:
vrf = 'default'
for line in out.splitlines():
line = line.strip()
# R2_xrv#show route ipv4
# Routing Descriptor Blocks
# No advertising protos.
m = p12.match(line)
if m or not line:
continue
# VRF: VRF501
# VRF: L:123
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
continue
# R 10.1.0.0/8 [120/1] via 10.12.120.1, 1w0d, GigabitEthernet0/0/0/0.120
m = p2.match(line)
if m:
group = m.groupdict()
code1 = group['code1']
source_protocol_code = re.split(r'\*|\(\!\)|\(\>\)', code1)[0].strip()
for key,val in self.source_protocol_dict.items():
if source_protocol_code in val:
source_protocol = key
code2 = group['code2']
if code2:
code1 = '{} {}'.format(code1, code2)
code3 = group['code3']
if code3:
code1 = '{} {}'.format(code1, code3)
network = group['network']
route_preference = int(group['route_preference'])
metric = int(group['metric'])
next_hop = group['next_hop']
updated = group['date']
interface = group['interface']
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'active': True})
route_dict.update({'route_preference': route_preference})
route_dict.update({'metric': metric})
route_dict.update({'source_protocol': source_protocol})
route_dict.update({'source_protocol_codes': code1})
index = 1
next_hop_list_dict = route_dict.setdefault('next_hop', {}). \
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
next_hop_list_dict.update({'index': index})
next_hop_list_dict.update({'next_hop': next_hop})
if interface:
next_hop_list_dict.update({'outgoing_interface': interface})
if updated:
next_hop_list_dict.update({'updated': updated})
continue
# [90/15360] via 10.23.90.3, 1w0d, GigabitEthernet0/0/0/1.90
m = p3.match(line)
if m:
group = m.groupdict()
route_preference = int(group['route_preference'])
metric = int(group['metric'])
next_hop = group['next_hop']
updated = group['date']
interface = group['interface']
route_dict.update({'route_preference': route_preference})
route_dict.update({'metric': metric})
index += 1
next_hop_list_dict = route_dict.setdefault('next_hop', {}). \
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
next_hop_list_dict.update({'index': index})
next_hop_list_dict.update({'next_hop': next_hop})
if interface:
next_hop_list_dict.update({'outgoing_interface': interface})
if updated:
next_hop_list_dict.update({'updated': updated})
continue
# L 10.16.2.2/32 is directly connected, 3w5d, Loopback0
# is directly connected, 01:51:13, GigabitEthernet0/0/0/3
# S 10.2.2.2/32 is directly connected, 00:06:36, Null0
m = p4.match(line)
if m:
try:
group = m.groupdict()
code1 = group.get('code1', None)
source_protocol = None
network = group.get('network', None)
updated = group.get('date', None)
interface = group.get('interface', None)
if network:
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'active': True})
if code1:
source_protocol_code = re.split(r'\*|\(\!\)|\(\>\)', code1)[0].strip()
for key,val in self.source_protocol_dict.items():
if source_protocol_code in val:
source_protocol = key
code2 = group.get('code2', None)
if code2:
code1 = '{} {}'.format(code1, code2)
if source_protocol:
route_dict.update({'source_protocol': source_protocol})
route_dict.update({'source_protocol_codes': code1})
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('outgoing_interface', {}). \
setdefault(interface, {})
if interface:
outgoing_interface_dict.update({'outgoing_interface': interface})
if updated:
outgoing_interface_dict.update({'updated': updated})
except Exception:
print('--->'+line)
continue
# Routing entry for 10.151.0.0/24, 1 known subnets
# Routing entry for 0.0.0.0/0, supernet
# Routing entry for 192.168.154.0/24
m = p5.match(line)
if m:
group = m.groupdict()
network = group['network']
ip = group['ip']
mask = group['mask']
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'ip': ip})
route_dict.update({'mask': mask})
route_dict.update({'active': True})
continue
# Known via "static", distance 1, metric 0, candidate default path
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "rip", distance 120, metric 2
# Known via "connected", distance 0, metric 0 (connected)
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "bgp 65161", distance 20, metric 0, candidate default path
# Known via "ospf 3", distance 110, metric 32001, type extern 1
m = p6.match(line)
if m:
group = m.groupdict()
known_via = group['known_via']
metric = int(group['metric'])
distance = int(group['distance'])
_type = group['type']
route_dict.update({'known_via': known_via})
route_dict.update({'metric': metric})
route_dict.update({'distance': distance})
if _type:
route_dict.update({'type': _type})
continue
# * directly connected, via GigabitEthernet1.120
m = p7.match(line)
if m:
group = m.groupdict()
code1 = group.get('code1', None)
source_protocol = None
network = group.get('network', None)
updated = group.get('date', None)
interface = group.get('interface', None)
if network:
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'active': True})
if code1:
source_protocol_code = re.split(r'\*|\(\!\)|\(\>\)', code1)[0].strip()
for key,val in self.source_protocol_dict.items():
if source_protocol_code in val:
source_protocol = key
code2 = group.get('code2', None)
if code2:
code1 = '{} {}'.format(code1, code2)
route_dict.update({'source_protocol': source_protocol})
route_dict.update({'source_protocol_codes': code1})
if interface:
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('outgoing_interface', {}). \
setdefault(interface, {})
outgoing_interface_dict.update({'outgoing_interface': interface})
if updated:
outgoing_interface_dict.update({'updated': updated})
# Route metric is 10880, traffic share count is 1
m = p8.match(line)
if m:
group = m.groupdict()
metric = int(group['metric'])
outgoing_interface_dict.update({'metric': metric})
if group.get('share_count', None):
share_count = int(group['share_count'])
outgoing_interface_dict.update({'share_count': share_count})
# outgoing_interface_dict.update({k:v for k,v in group.items() if v})
continue
# eigrp/100 (protoid=5, clientid=22)
m = p9.match(line)
if m:
group = m.groupdict()
redist_advertiser = group['redist_advertiser']
protoid = int(group['protoid'])
clientid = int(group['clientid'])
redist_advertiser_dict = route_dict.setdefault('redist_advertisers', {}). \
setdefault(redist_advertiser, {})
redist_advertiser_dict.update({'protoid': protoid})
redist_advertiser_dict.update({'clientid': clientid})
continue
# Installed Oct 23 22:09:38.380 for 5d21h
m = p10.match(line)
if m:
group = m.groupdict()
installed_dict = route_dict.setdefault('installed', {})
installed_dict.update({k:v for k,v in group.items() if v})
continue
# 10.12.90.1, from 10.12.90.1, via GigabitEthernet0/0/0/0.90
# 172.23.6.96, from 172.23.15.196
m = p11.match(line)
if m:
group = m.groupdict()
nexthop = group['nexthop']
_from = group['from']
interface = group['interface']
index += 1
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
outgoing_interface_dict.update({'index': index})
if interface:
outgoing_interface_dict.update({'outgoing_interface': interface})
if _from:
outgoing_interface_dict.update({'from': _from})
outgoing_interface_dict.update({'next_hop': nexthop})
continue
# Tag 10584, type internal
m13 = p13.match(line)
if m13:
group = m13.groupdict()
route_dict.update({'tag': group['tag']})
route_dict.update({'type': group['type']})
continue
# Nexthop in Vrf: "default", Table: "default", IPv4 Unicast, Table Id: 0xe0000000
m14 = p14.match(line)
if m14:
group = m14.groupdict()
interface = group['interface']
table = group['table']
address_family = group['address_family']
table_id = group['table_id']
if interface:
nexthop_intf_dict = route_dict.setdefault('next_hop', {}).\
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
nexthop_intf_dict.update({'index': index})
if interface:
nexthop_intf_dict.update({'nexthop_in_vrf': interface})
nexthop_intf_dict.update({'table': table})
nexthop_intf_dict.update({'address_family': address_family})
nexthop_intf_dict.update({'table_id': table_id})
continue
# Gateway of last resort is 172.16.0.88 to network 0.0.0.0
m15 = p15.match(line)
if m15:
group = m15.groupdict()
gw_dict = ret_dict.setdefault('vrf', {}).\
setdefault(vrf, {}).\
setdefault('last_resort', {})
gw_dict.update({'gateway': group['gateway']})
if group['to_network']:
gw_dict.update({'to_network': group['to_network']})
return ret_dict
# ====================================================
# parser for show route ipv6
# ====================================================
class ShowRouteIpv6(ShowRouteIpv4Schema):
"""Parser for :
show route ipv6
show route vrf <vrf> ipv6"""
cli_command = [
'show route ipv6',
'show route vrf {vrf} ipv6',
'show route ipv6 {protocol}',
'show route vrf {vrf} ipv6 {protocol}',
'show route ipv6 {route}',
'show route vrf {vrf} ipv6 {route}'
]
"""
Codes: C - connected, S - static, R - RIP, B - BGP, (>) - Diversion path
D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area
N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2
E1 - OSPF external type 1, E2 - OSPF external type 2, E - EGP
i - ISIS, L1 - IS-IS level-1, L2 - IS-IS level-2
ia - IS-IS inter area, su - IS-IS summary null, * - candidate default
U - per-user static route, o - ODR, L - local, G - DAGR, l - LISP
A - access/subscriber, a - Application route
M - mobile route, r - RPL, t - Traffic Engineering, (!) - FRR Backup path
"""
source_protocol_dict = {
'ospf': ['O', 'IA', 'N1', 'N2', 'E1', 'E2'],
'odr': ['o'],
'isis': ['i', 'su', 'L1', 'L2', 'ia'],
'eigrp': ['D', 'EX'],
'static': ['S'],
'egp': ['E'],
'dagr': ['G'],
'rpl': ['r'],
'mobile router': ['M'],
'lisp': ['I', 'l'],
'nhrp': ['H'],
'local': ['L'],
'connected': ['C'],
'bgp': ['B'],
'rip': ['R'],
'per-user static route': ['U'],
'access/subscriber': ['A'],
'traffic engineering': ['t'],
'application route' : ['a'],
}
protocol_set = {'ospf', 'odr', 'isis', 'eigrp', 'static', 'mobile',
'rip', 'lisp', 'nhrp', 'local', 'connected', 'bgp'}
def cli(self, vrf=None, route=None, protocol=None, output=None):
# Check if argument from device.parse is protocol or route
if protocol and protocol not in self.protocol_set:
route = protocol
protocol = None
if output is None:
if vrf and route:
cmd = self.cli_command[5].format(
vrf=vrf,
route=route
)
elif vrf and protocol:
cmd = self.cli_command[3].format(
vrf=vrf,
protocol=protocol
)
elif vrf:
cmd = self.cli_command[1].format(
vrf=vrf
)
elif protocol:
cmd = self.cli_command[2].format(
protocol=protocol
)
elif route:
cmd = self.cli_command[4].format(
route=route
)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# VRF: VRF501
# VRF: L:123
p1 = re.compile(r'^\s*VRF: +(?P<vrf>\S+)$')
# S 2001:1:1:1::1/128
# L 2001:2:2:2::2/128 is directly connected,
# i L2 2001:0:10:204:0:33::/126
# i L1 2001:21:21:21::21/128
# i*L2 ::/0
# a* ::/0
p2 = re.compile(r'^((?P<code1>[\w](\*)*)(\s*)?(?P<code2>\w+)? '
r'+(?P<network>\S+))?( +is +directly +connected\,)?$')
# [1/0] via 2001:20:1:2::1, 01:52:23, GigabitEthernet0/0/0/0
# [200/0] via 2001:13:13:13::13, 00:53:22
# [0/0] via ::, 5w2d
p3 = re.compile(r'^\[(?P<route_preference>\d+)\/(?P<metric>\d+)\] +'
r'via +(?P<next_hop>\S+)( +\(nexthop +in +vrf +\w+\))?,'
r'( +(?P<date>[\w:]+))?,?( +(?P<interface>[\w\/\.\-]+))?$')
# 01:52:24, Loopback0
p5 = re.compile(r'^(?P<date>[\w+:]+), +(?P<interface>\S+)$')
# Routing entry for 2001:1:1:1::1/128, 1 known subnets
# Routing entry for 2001:1:1:1::1/128, supernet
# Routing entry for 2001:1:1:1::1/128
p6 = re.compile(r'^Routing +entry +for +(?P<network>(?P<ip>[\w\:\.]+)'
r'\/(?P<mask>\d+))(?:, +(?P<net>[\w\s]+))?$')
# Known via "connected", distance 0, metric 0 (connected)
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "bgp 65161", distance 20, metric 0, candidate default path
p7 = re.compile(r'^Known +via +\"(?P<known_via>[\w ]+)\", +'
r'distance +(?P<distance>\d+), +metric +(?P<metric>\d+)'
r'( \(connected\))?(, +type +(?P<type>\S+))?(, +candidate +'
r'default +path)?$')
# * directly connected, via GigabitEthernet1.120
p8 = re.compile(r'^(\* +)?directly +connected, via +(?P<interface>\S+)$')
# Route metric is 10880, traffic share count is 1
p9 = re.compile(r'^Route +metric +is +(?P<metric>\d+)(, +'
r'traffic +share +count +is +(?P<share_count>\d+))?'
r'(, +Wt +is +\d+)?$')
# eigrp/100 (protoid=5, clientid=22)
p10 = re.compile(r'^(?P<redist_advertiser>\S+) +\(protoid=(?P<protoid>\d+)'
r', +clientid=(?P<clientid>\d+)\)$')
# Installed Oct 23 22:09:38.380 for 5d21h
p11 = re.compile(r'^Installed +(?P<date>[\S\s]+) +for +(?P<for>\S+)$')
# fe80::f816:3eff:fe76:b56d, from fe80::f816:3eff:fe76:b56d, via GigabitEthernet0/0/0/0.390
p12 = re.compile(r'^(?P<nexthop>\S+)(, from +(?P<from>\S+))?, '
r'+via +(?P<interface>\S+)$')
# R2_xrv#show route ipv6
p13 = re.compile(r'^((\S+#)?(show +route))|(Routing +Descriptor +'
r'Blocks)|(No +advertising +protos\.)|(Redist +Advertisers:)')
# Gateway of last resort is fe80::10ff:fe04:209e to network ::
# Gateway of last resort is not set
# Gateway of last resort is 10.50.15.1 to network 0.0.0.0
p14 = re.compile(r'^Gateway +of +last +resort +is '
r'+(?P<gateway>(not +set)|\S+)( +to +network '
r'+(?P<to_network>\S+))?$')
ret_dict = {}
address_family = 'ipv6'
index = 0
if not vrf:
vrf = 'default'
for line in out.splitlines():
line = line.strip()
# R2_xrv#show route ipv6
# Routing Descriptor Blocks
# No advertising protos.
m = p13.match(line)
if m or not line:
continue
# VRF: VRF501
m = p1.match(line)
if m:
vrf = m.groupdict()['vrf']
continue
# S 2001:1:1:1::1/128
# L 2001:2:2:2::2/128 is directly connected,
# i L2 2001:0:10:204:0:33::/126
# i L1 2001:21:21:21::21/128
# i*L2 ::/0
# a* ::/0
m = p2.match(line)
if m:
group = m.groupdict()
code1 = group['code1']
source_protocol_code = re.split(r'\*|\(\!\)|\(\>\)', code1)[0].strip()
for key,val in self.source_protocol_dict.items():
if source_protocol_code in val:
source_protocol = key
code2 = group['code2']
if code2:
code1 = '{} {}'.format(code1, code2)
network = group['network']
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'source_protocol': source_protocol})
route_dict.update({'source_protocol_codes': code1})
route_dict.update({'route': network})
route_dict.update({'active': True})
index = 0
continue
m = p3.match(line)
if m:
group = m.groupdict()
route_preference = int(group['route_preference'])
metric = int(group['metric'])
next_hop = group.get('next_hop', None)
updated = group.get('date', None)
interface = group.get('interface', None)
route_dict.update({'route_preference': route_preference})
route_dict.update({'metric': metric})
index += 1
next_hop_list_dict = route_dict.setdefault('next_hop', {}). \
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
next_hop_list_dict.update({'index': index})
if next_hop:
next_hop_list_dict.update({'next_hop': next_hop})
if interface:
next_hop_list_dict.update({'outgoing_interface': interface})
if updated:
next_hop_list_dict.update({'updated': updated})
continue
# 01:52:24, Loopback0
m = p5.match(line)
if m:
group = m.groupdict()
updated = group['date']
interface = group['interface']
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('outgoing_interface', {}). \
setdefault(interface, {})
outgoing_interface_dict.update({'outgoing_interface': interface})
outgoing_interface_dict.update({'updated': updated})
continue
# Routing entry for 2001:1:1:1::1/128, 1 known subnets
# Routing entry for 2001:1:1:1::1/128, supernet
# Routing entry for 2001:1:1:1::1/128
m = p6.match(line)
if m:
group = m.groupdict()
network = group['network']
ip = group['ip']
mask = group['mask']
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'ip': ip})
route_dict.update({'mask': mask})
route_dict.update({'active': True})
continue
# Known via "static", distance 1, metric 0, candidate default path
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "rip", distance 120, metric 2
# Known via "connected", distance 0, metric 0 (connected)
# Known via "eigrp 1", distance 130, metric 10880, type internal
# Known via "bgp 65161", distance 20, metric 0, candidate default path
m = p7.match(line)
if m:
group = m.groupdict()
known_via = group['known_via']
metric = int(group['metric'])
distance = int(group['distance'])
_type = group['type']
route_dict.update({'known_via': known_via})
route_dict.update({'metric': metric})
route_dict.update({'distance': distance})
if _type:
route_dict.update({'type': _type})
continue
# * directly connected, via GigabitEthernet1.120
m = p8.match(line)
if m:
group = m.groupdict()
code1 = group.get('code1', None)
source_protocol = None
network = group.get('network', None)
updated = group.get('date', None)
interface = group.get('interface', None)
if network:
route_dict = ret_dict.setdefault('vrf', {}). \
setdefault(vrf, {}). \
setdefault('address_family', {}). \
setdefault(address_family, {}). \
setdefault('routes', {}). \
setdefault(network, {})
route_dict.update({'route': network})
route_dict.update({'active': True})
if code1:
source_protocol_code = re.split(r'\*|\(\!\)|\(\>\)', code1)[0].strip()
for key,val in self.source_protocol_dict.items():
if source_protocol_code in val:
source_protocol = key
code2 = group.get('code2', None)
if code2:
code1 = '{} {}'.format(code1, code2)
route_dict.update({'source_protocol': source_protocol})
route_dict.update({'source_protocol_codes': code1})
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('outgoing_interface', {}). \
setdefault(interface, {})
if interface:
outgoing_interface_dict.update({'outgoing_interface': interface})
if updated:
outgoing_interface_dict.update({'updated': updated})
# Route metric is 10880, traffic share count is 1
m = p9.match(line)
if m:
group = m.groupdict()
metric = int(group['metric'])
outgoing_interface_dict.update({'metric': metric})
if group.get('share_count', None):
share_count = int(group['share_count'])
outgoing_interface_dict.update({'share_count': share_count})
# outgoing_interface_dict.update({k:v for k,v in group.items() if v})
continue
# eigrp/100 (protoid=5, clientid=22)
m = p10.match(line)
if m:
group = m.groupdict()
redist_advertiser = group['redist_advertiser']
protoid = int(group['protoid'])
clientid = int(group['clientid'])
redist_advertiser_dict = route_dict.setdefault('redist_advertisers', {}). \
setdefault(redist_advertiser, {})
redist_advertiser_dict.update({'protoid': protoid})
redist_advertiser_dict.update({'clientid': clientid})
continue
# Installed Oct 23 22:09:38.380 for 5d21h
m = p11.match(line)
if m:
group = m.groupdict()
installed_dict = route_dict.setdefault('installed', {})
installed_dict.update({k:v for k,v in group.items() if v})
continue
# fe80::f816:3eff:fe76:b56d, from fe80::f816:3eff:fe76:b56d, via GigabitEthernet0/0/0/0.390
m = p12.match(line)
if m:
group = m.groupdict()
nexthop = group['nexthop']
_from = group['from']
interface = group['interface']
index += 1
outgoing_interface_dict = route_dict.setdefault('next_hop', {}). \
setdefault('next_hop_list', {}). \
setdefault(int(index), {})
outgoing_interface_dict.update({'index': index})
outgoing_interface_dict.update({'outgoing_interface': interface})
if _from:
outgoing_interface_dict.update({'from': _from})
outgoing_interface_dict.update({'next_hop': nexthop})
continue
# Gateway of last resort is fe80::10ff:fe04:209e to network ::
# Gateway of last resort is not set
# Gateway of last resort is 10.50.15.1 to network 0.0.0.0
m14 = p14.match(line)
if m14:
group = m14.groupdict()
gw_dict = ret_dict.setdefault('vrf', {}).\
setdefault(vrf, {}).\
setdefault('last_resort', {})
gw_dict.update({'gateway': group['gateway']})
if group['to_network']:
gw_dict.update({'to_network' : group['to_network']})
continue
return ret_dict
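# Added standalone sketch: exercising the p3 next-hop pattern above against one
# of the sample lines quoted in its comment. Guarded so it only runs when the
# module is executed directly.
if __name__ == '__main__':
    import re as _re
    _p3_demo = _re.compile(r'^\[(?P<route_preference>\d+)\/(?P<metric>\d+)\] +'
                           r'via +(?P<next_hop>\S+)( +\(nexthop +in +vrf +\w+\))?,'
                           r'( +(?P<date>[\w:]+))?,?( +(?P<interface>[\w\/\.\-]+))?$')
    _m = _p3_demo.match('[1/0] via 2001:20:1:2::1, 01:52:23, GigabitEthernet0/0/0/0')
    # Expected groups: route_preference '1', metric '0', next_hop '2001:20:1:2::1',
    # date '01:52:23', interface 'GigabitEthernet0/0/0/0'
    print(_m.groupdict())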
# ====================================================
# schema for show route summary
# ====================================================
class ShowRouteAllSummarySchema(MetaParser):
"""Schema for :
show route afi-all safi-all summary
show route vrf all afi-all safi-all summary
show route vrf <vrf> afi-all safi-all summary"""
schema = {
'vrf': {
Any(): {
'address_family': {
Any(): {
'total_route_source': {
'routes': int,
'backup': int,
'deleted': int,
'memory_bytes': int,
},
'route_source': {
Any(): {
Any(): {
'routes': int,
'backup': int,
'deleted': int,
'memory_bytes': int,
},
Optional('routes'): int,
Optional('backup'): int,
Optional('deleted'): int,
Optional('memory_bytes'): int,
},
}
},
}
},
}
}
# ====================================================
# parser for show route summary
# ====================================================
class ShowRouteAllSummary(ShowRouteAllSummarySchema):
"""Parser for :
show route afi-all safi-all summary
show route vrf all afi-all safi-all summary
show route vrf <vrf> afi-all safi-all summary"""
cli_command = [
'show route afi-all safi-all summary',
'show route vrf {vrf} afi-all safi-all summary'
]
def cli(self, vrf=None, output=None):
if output is None:
if vrf:
cmd = self.cli_command[1].format(vrf=vrf)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# VRF: VRF_NAME
p1 = re.compile(r'^VRF: (?P<vrf>.*)')
# IPv4 Unicast:
p2 = re.compile(r'^(?P<address_family>IPv.*):')
# connected 0 0 0 0
p3 = re.compile(
r'^(?P<protocol>[a-zA-Z0-9\-_]+) +(?P<instance>[a-zA-Z0-9\-_]+)* * +'
r'(?P<routes>\d+) +(?P<backup>\d+) +(?P<deleted>\d+) +(?P<memory_bytes>\d+)')
ret_dict = {}
if vrf is None:
vrf = 'default'
vrf_dict = ret_dict.setdefault('vrf',{}).setdefault(vrf, {})
elif vrf != 'all':
vrf_dict = ret_dict.setdefault('vrf',{}).setdefault(vrf, {})
for line in out.splitlines():
line = line.strip()
if vrf == 'all':
# VRF: VRF_NAME
m = p1.match(line)
if m:
vrf_temp = m.groupdict()['vrf']
vrf_dict = ret_dict.setdefault('vrf',{}).setdefault(vrf_temp, {})
continue
# IPv4 Unicast:
m = p2.match(line)
if m:
addrs_fam = m.groupdict()['address_family']
addrs_fam_dict = vrf_dict.setdefault('address_family', {}).setdefault(addrs_fam, {})
vrf_rs_dict = addrs_fam_dict.setdefault('route_source', {})
# connected 0 0 0 0
m = p3.match(line)
if m:
group = m.groupdict()
protocol = group.pop('protocol')
instance = group.pop('instance')
if protocol == 'Total':
protocol_dict = addrs_fam_dict.setdefault('total_route_source', {})
else:
protocol_dict = vrf_rs_dict.setdefault(protocol, {})
if instance is not None:
inst_dict = protocol_dict.setdefault(instance, {})
inst_dict.update({k:int(v) for k, v in group.items() if v is not None})
else:
group = {k: int(v) for k, v in group.items() if v is not None}
protocol_dict.update(group)
continue
return ret_dict
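# Hedged illustration (added; all numbers are made-up example values): the shape
# of dict that ShowRouteAllSummarySchema validates, for one VRF and one address
# family. A protocol without an instance ('connected') keeps its counters at the
# protocol level; an instanced protocol (isis '1') nests them one level deeper.
EXAMPLE_SUMMARY_OUTPUT = {
    'vrf': {
        'default': {
            'address_family': {
                'IPv4 Unicast': {
                    'total_route_source': {
                        'routes': 5, 'backup': 0, 'deleted': 0, 'memory_bytes': 1200},
                    'route_source': {
                        'connected': {'routes': 2, 'backup': 0, 'deleted': 0, 'memory_bytes': 480},
                        'isis': {'1': {'routes': 3, 'backup': 0, 'deleted': 0, 'memory_bytes': 720}},
                    },
                },
            },
        },
    },
}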
| 42.401961
| 103
| 0.443237
| 4,771
| 47,575
| 4.311046
| 0.071893
| 0.046188
| 0.032089
| 0.015753
| 0.844564
| 0.81442
| 0.800321
| 0.788458
| 0.77781
| 0.759529
| 0
| 0.055134
| 0.409837
| 47,575
| 1,121
| 104
| 42.439786
| 0.677423
| 0.148923
| 0
| 0.734848
| 0
| 0.007576
| 0.165127
| 0.03615
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003788
| false
| 0
| 0.003788
| 0
| 0.02904
| 0.001263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcfe7a2370915e3e38f17b05627569aabbcb8d3f
| 10,987
|
py
|
Python
|
ev3/practice/control.py
|
qualityapp2014/fll_marshmallow_pandas_2020
|
86f0162bf855a36253093488f69b6164d156fb4d
|
[
"MIT"
] | null | null | null |
ev3/practice/control.py
|
qualityapp2014/fll_marshmallow_pandas_2020
|
86f0162bf855a36253093488f69b6164d156fb4d
|
[
"MIT"
] | 2
|
2021-02-11T05:11:43.000Z
|
2021-02-26T03:38:15.000Z
|
ev3/practice/control.py
|
qualityapp2014/fll_marshmallow_pandas_2020
|
86f0162bf855a36253093488f69b6164d156fb4d
|
[
"MIT"
] | 1
|
2021-02-09T02:03:39.000Z
|
2021-02-09T02:03:39.000Z
|
from config import *
import time
def move_motor(target, speed=100):
left_motor.reset_angle(0)
right_motor.reset_angle(0)
left_motor.run_target(speed, target, wait=False)
right_motor.run_target(speed, target, wait=False)
while True:
left_angle = left_motor.angle()
right_angle = right_motor.angle()
print('L: {}, R: {}, G: {}'.format(left_angle, right_angle, gyro.speed()))
if left_angle >= target and right_angle >= target:
break
wait(1)
left_motor.stop()
right_motor.stop()
def move(distance, acceleration=100, interval=1, gyro_scaler=10):
turn_rate = 0
speed = 0
direction = 1 if distance > 0 else -1
speed_delta = direction * acceleration / interval
robot.reset()
gyro.reset_angle(0)
while True:
current_distance = robot.distance()
if current_distance * direction >= distance * direction:
break
angle = gyro.angle()
turn_rate = angle * gyro_scaler
print('distance: {}, gyro angle: {}, turn rate: {}'.format(current_distance, angle, turn_rate))
speed += speed_delta  # ramp speed up by speed_delta each tick; without this, speed stays 0 and the loop never finishes
robot.drive(speed, turn_rate)
wait(interval)
robot.stop()
def turn(angle):
gyro.reset_angle(0)
angle_error = angle
max_iter = 15
while abs(angle_error) > 0 and max_iter > 0:
robot.turn(angle_error)
angle_error = angle - gyro.angle()
print('Angle: {}, angle error: {}'.format(gyro.angle(), angle_error))
max_iter = max_iter - 1
# wait(angle_error*30)
def follow(distance, turn_kp=0.2, turn_ki=0.1, turn_kd=0):
if distance < 0:
print("Can't go follow lines backwards.")
return
# need to measure and tune this
BLACK = 10
WHITE = 94
MIDPOINT = (BLACK + WHITE) / 2
wait_interval = 1 # Don't need to change this.
# constants in turn rate pid control
# turn_kp = 0.1
# turn_ki = 0
# turn_kd = 0
max_angle_error_i = 5 # bound on error integral
max_turn_rate = 20
max_speed = 100
min_speed = 30
ramp_distance = 200 # distance to ramp from min_speed to max_speed and down again
ramp_down_distance_mult = 1.5 # sets a longer time to ramp down because of slippage
###
angle_error = 0
angle_error_prev = 0
angle_error_i = 0
angle_error_d = 0
direction = 1 if distance > 0 else -1
print('Turn pid: {}, {}, {}'.format(turn_kp, turn_ki, turn_kd))
print('Turn rate limits: max_angle_error_i: {}, max_turn_rate: {}'.format(max_angle_error_i, max_turn_rate))
robot.reset()
time_prev = time.time()
while True:
current_distance = abs(robot.distance())
if current_distance >= direction * distance:
break
speed_up = min(1, current_distance / ramp_distance) * (max_speed - min_speed) + min_speed
speed_down = min(1, (abs(distance) - current_distance) / (ramp_distance * ramp_down_distance_mult)) \
* (max_speed - min_speed) + min_speed
speed = min(speed_up, speed_down) * direction
time_now = time.time()
time_interval = time_now - time_prev
angle = MIDPOINT - color_left.reflection()
angle_error = 0 - angle # assume 0 set point
angle_error_i += angle_error * time_interval # error angle integral
angle_error_i = max(-max_angle_error_i, min(max_angle_error_i, angle_error_i)) # set bounds on error integral
angle_error_d = (angle_error - angle_error_prev) / time_interval # angle error derivative
angle_error_prev = angle_error
turn_rate = turn_kp * angle_error + turn_ki * angle_error_i + turn_kd * angle_error_d # pid equation
turn_rate = max(-max_turn_rate, min(max_turn_rate, turn_rate))
print('distance: {}, gyro angle: {}, speed: {:0.1f}, turn rate: {:0.2f}, dt: {:0.3f}'
.format(current_distance, angle, speed, turn_rate, time_interval))
print(' angle_error: {}, angle_error_i: {:0.2f}, angle_error_d: {:0.2f}'
.format(angle_error, angle_error_i, angle_error_d))
robot.drive(speed, turn_rate)
time_prev = time_now
wait(wait_interval)
robot.stop()
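# Added sketch: the trapezoidal speed profile shared by follow and move2,
# isolated as a pure function. Defaults mirror follow's constants above
# (move2 uses max_speed=300 instead).
def ramp_speed(current, total, min_speed=30, max_speed=100, ramp=200, down_mult=1.5):
    up = min(1, current / ramp) * (max_speed - min_speed) + min_speed
    down = min(1, (abs(total) - current) / (ramp * down_mult)) \
        * (max_speed - min_speed) + min_speed
    return min(up, down)  # ramp up near the start, ramp down near the end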
def move2(distance):
""" Move straight and read gyro angle to move in a straight line. """
wait_interval = 1 # Don't need to change this.
# constants in turn rate pid control
turn_kp = 3
turn_ki = 3
turn_kd = 0.1
max_angle_error_i = 5 # bound on error integral
angle_error_i_decay = 3 # decay on error integral to avoid oscillation around 0
max_turn_rate = 20
max_speed = 300
min_speed = 30
ramp_distance = 200 # distance to ramp from min_speed to max_speed and down again
ramp_down_distance_mult = 1.5 # sets a longer time to ramp down because of slippage
###
angle_error = 0
angle_error_prev = 0
angle_error_i = 0
angle_error_d = 0
direction = 1 if distance > 0 else -1
print('Turn pid: {}, {}, {}'.format(turn_kp, turn_ki, turn_kd))
print('Turn rate limits: max_angle_error_i: {}, max_turn_rate: {}'.format(max_angle_error_i, max_turn_rate))
robot.reset()
gyro.reset_angle(0)
time_prev = time.time()
while True:
current_distance = abs(robot.distance())
if current_distance >= direction * distance:
break
speed_up = min(1, current_distance / ramp_distance) * (max_speed - min_speed) + min_speed
speed_down = min(1, (abs(distance) - current_distance) / (ramp_distance * ramp_down_distance_mult)) \
* (max_speed - min_speed) + min_speed
speed = min(speed_up, speed_down) * direction
time_now = time.time()
time_interval = time_now - time_prev
angle = gyro.angle()
angle_error = 0 - angle # assume 0 set point
angle_error_i += angle_error * time_interval # error angle integral
angle_error_i = max(-max_angle_error_i, min(max_angle_error_i, angle_error_i)) # set bounds on error integral
# We add an error integral decay because the EV3's angle sensor only gives readings in integer degrees.
# To prevent oscillation around -1 to 1 degree, we decay the integral error.
angle_error_i *= max(0, 1 - angle_error_i_decay*time_interval)
angle_error_d = (angle_error - angle_error_prev) / time_interval # angle error derivative
angle_error_prev = angle_error
turn_rate = turn_kp * angle_error + turn_ki * angle_error_i + turn_kd * angle_error_d # pid equation
turn_rate = max(-max_turn_rate, min(max_turn_rate, turn_rate))
print('distance: {}, gyro angle: {}, speed: {:0.1f}, turn rate: {:0.2f}, dt: {:0.3f}'
.format(current_distance, angle, speed, turn_rate, time_interval))
print(' angle_error: {}, angle_error_i: {:0.2f}, angle_error_d: {:0.2f}'
.format(angle_error, angle_error_i, angle_error_d))
robot.drive(speed, turn_rate)
time_prev = time_now
wait(wait_interval)
robot.stop()
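# Added sketch: the clamp-and-decay integral update used in move2, isolated as a
# pure function so it can be sanity-checked off the robot. Defaults mirror
# max_angle_error_i and angle_error_i_decay above.
def integral_update(err_i, err, dt, max_i=5, decay=3):
    err_i += err * dt                       # accumulate the error integral
    err_i = max(-max_i, min(max_i, err_i))  # clamp to +/- max_i
    err_i *= max(0, 1 - decay * dt)         # decay toward 0 to damp oscillation
    return err_i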
def move_pid_gyro(distance, target_angle=0, max_speed=300, min_speed=30, max_turn_rate=40):
""" Move with a target turning angle and distance """
# Modified from move2 by Yan: if target_angle is set to 0, the robot moves along a straight line.
wait_interval = 1 # Don't need to change this.
# constants in turn rate pid control
turn_kp = 3
turn_ki = 3
turn_kd = 0.1
max_angle_error_i = 5 # bound on error integral
angle_error_i_decay = 3 # decay on error integral to avoid oscillation around 0
ramp_distance = 350  # distance to ramp from min_speed to max_speed and down again
ramp_down_distance_mult = 1.5 # sets a longer time to ramp down because of slippage
###
angle_error = target_angle
angle_error_prev = target_angle
angle_error_i = 0
angle_error_d = 0
direction = 1 if distance > 0 else -1
print('Turn pid: {}, {}, {}'.format(turn_kp, turn_ki, turn_kd))
print('Turn rate limits: max_angle_error_i: {}, max_turn_rate: {}'.format(max_angle_error_i, max_turn_rate))
robot.reset()
gyro.reset_angle(0)
time_prev = time.time()
while True:
current_distance = abs(robot.distance())
if current_distance >= direction * distance:
break
speed_up = min(1, current_distance / ramp_distance) * (max_speed - min_speed) + min_speed
speed_down = min(1, (abs(distance) - current_distance) / (ramp_distance * ramp_down_distance_mult)) \
* (max_speed - min_speed) + min_speed
speed = min(speed_up, speed_down) * direction
time_now = time.time()
time_interval = time_now - time_prev
angle = gyro.angle()
angle_error = target_angle - angle # error relative to the target_angle set point
angle_error_i += angle_error * time_interval # error angle integral
angle_error_i = max(-max_angle_error_i, min(max_angle_error_i, angle_error_i)) # set bounds on error integral
# We add an error integral decay because the EV3's angle sensor only gives readings in integer degrees.
# To prevent oscillation around -1 to 1 degree, we decay the integral error.
angle_error_i *= max(0, 1 - angle_error_i_decay*time_interval)
angle_error_d = (angle_error - angle_error_prev) / time_interval # angle error derivative
angle_error_prev = angle_error
turn_rate = turn_kp * angle_error + turn_ki * angle_error_i + turn_kd * angle_error_d # pid equation
turn_rate = max(-max_turn_rate, min(max_turn_rate, turn_rate))
print('distance: {}, gyro angle: {}, speed: {:0.1f}, turn rate: {:0.2f}, dt: {:0.3f}'
.format(current_distance, angle, speed, turn_rate, time_interval))
print(' angle_error: {}, angle_error_i: {:0.2f}, angle_error_d: {:0.2f}'
.format(angle_error, angle_error_i, angle_error_d))
robot.drive(speed, turn_rate)
time_prev = time_now
wait(wait_interval)
robot.stop()
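# Hypothetical usage (commented out because it needs the pybricks hardware
# objects that config provides: robot, gyro, and the motors). Distances are in
# the units robot.distance() reports.
# if __name__ == '__main__':
#     move2(500)                # gyro-corrected straight run
#     turn(90)                  # gyro-based 90-degree turn
#     follow(300, turn_kp=0.2)  # PID line follow for 300 units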
| 34.879365
| 117
| 0.655684
| 1,573
| 10,987
| 4.287985
| 0.095359
| 0.152706
| 0.068495
| 0.031134
| 0.81705
| 0.806375
| 0.795849
| 0.778799
| 0.774796
| 0.769607
| 0
| 0.039192
| 0.242923
| 10,987
| 315
| 118
| 34.879365
| 0.7717
| 0.174934
| 0
| 0.773399
| 0
| 0.014778
| 0.088891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.009852
| null | null | 0.078818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4c17edfdbcbf10270f202c340639863237dc2e8f
| 161
|
py
|
Python
|
Temp/Censor/obscene_words_filter/default.py
|
fgl-foundation/ComiteteCore
|
25791521547ffe891672f61f1e5328a2181ba074
|
[
"MIT"
] | 7
|
2017-10-09T13:42:29.000Z
|
2018-09-18T09:06:38.000Z
|
Temp/Censor/obscene_words_filter/default.py
|
fgl-foundation/Aquarius
|
25791521547ffe891672f61f1e5328a2181ba074
|
[
"MIT"
] | 1
|
2018-04-02T17:54:06.000Z
|
2018-04-02T17:54:06.000Z
|
Temp/Censor/obscene_words_filter/default.py
|
fgl-foundation/Aquarius
|
25791521547ffe891672f61f1e5328a2181ba074
|
[
"MIT"
] | 4
|
2018-01-17T07:45:20.000Z
|
2019-04-17T16:00:06.000Z
|
from . import conf
from .words_filter import ObsceneWordsFilter
def get_default_filter():
return ObsceneWordsFilter(conf.bad_words_re, conf.good_words_re)
| 23
| 68
| 0.819876
| 22
| 161
| 5.681818
| 0.590909
| 0.112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118012
| 161
| 6
| 69
| 26.833333
| 0.880282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4c780733d16134e8b87aee6e1c53aa420e64c0f2
| 98
|
py
|
Python
|
modules/encoders/__init__.py
|
SmilesDZgk/DU-VAE
|
6c590922a5b634fadff814bd70d05065d584fa9f
|
[
"MIT"
] | 3
|
2021-12-01T12:11:30.000Z
|
2022-02-17T06:01:10.000Z
|
modules/encoders/__init__.py
|
SmilesDZgk/DU-VAE
|
6c590922a5b634fadff814bd70d05065d584fa9f
|
[
"MIT"
] | null | null | null |
modules/encoders/__init__.py
|
SmilesDZgk/DU-VAE
|
6c590922a5b634fadff814bd70d05065d584fa9f
|
[
"MIT"
] | null | null | null |
from .enc_lstm import *
from .enc_resnet_v2 import *
from .flow import *
from .enc_flow import *
| 16.333333
| 28
| 0.744898
| 16
| 98
| 4.3125
| 0.4375
| 0.304348
| 0.376812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.173469
| 98
| 5
| 29
| 19.6
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4c7b62a4116b44678f4de3d1c75c5a512ba54a94
| 6,319
|
py
|
Python
|
test/test_execution_time.py
|
skyxie/cron-validator
|
a61bb3113aed92b7da960015f81ac78a7187b48f
|
[
"MIT"
] | null | null | null |
test/test_execution_time.py
|
skyxie/cron-validator
|
a61bb3113aed92b7da960015f81ac78a7187b48f
|
[
"MIT"
] | null | null | null |
test/test_execution_time.py
|
skyxie/cron-validator
|
a61bb3113aed92b7da960015f81ac78a7187b48f
|
[
"MIT"
] | null | null | null |
from cron_validator.util import str_to_datetime
from cron_validator.validator import CronValidator
def test_generate_execution_time_from_minute_match():
from_str = "2019-04-23 12:00"
to_str = "2019-04-23 12:59"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("* * * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 60
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("23 * * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 1
assert dts[0] == str_to_datetime("2019-04-23 12:23")
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("1,23,59 * * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 3
assert dts[0] == str_to_datetime("2019-04-23 12:01")
assert dts[1] == str_to_datetime("2019-04-23 12:23")
assert dts[2] == str_to_datetime("2019-04-23 12:59")
def test_generate_execution_time_from_hour_match():
from_str = "2019-04-22 00:00"
to_str = "2019-04-23 23:59"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 * * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 48
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time(
"15 0,5,10,15,20 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)
):
print(dt)
dts.append(dt)
assert len(dts) == 10
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 */2 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 24
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 1/2 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 24
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 7-9 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 6
def test_generate_execution_time_from_day_of_month_match():
from_str = "2019-04-22 00:00"
to_str = "2019-04-23 23:59"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 2
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 22 * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 1
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 22-24 * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 2
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 5 * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 0
def test_generate_execution_time_from_month_match():
from_str = "2019-04-22 00:00"
to_str = "2019-04-23 23:59"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 2
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * 4 *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 2
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * 5 *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 0
def test_generate_execution_time_from_day_of_week_match():
from_str = "2019-04-22 00:00"
to_str = "2019-04-23 23:59"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 2
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * * 0", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 0
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("0 0 * * 1", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert len(dts) == 1
def test_make_sure_dt_is_rounded():
from_str = "2019-04-23 12:00:01"
to_str = "2019-04-23 12:59:02"
print("--------------------------------------------------")
dts = []
for dt in CronValidator.get_execution_time("* * * * *", from_dt=str_to_datetime(from_str), to_dt=str_to_datetime(to_str)):
print(dt)
dts.append(dt)
assert dt.second == 0
assert dt.microsecond == 0
assert len(dts) == 60
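# Added sketch: the same API the tests above exercise, outside pytest. The cron
# expression and window are arbitrary example values; this assumes the minute
# field accepts the same */n step syntax the hour field does in the tests.
if __name__ == '__main__':
    start = str_to_datetime("2019-04-23 12:00")
    end = str_to_datetime("2019-04-23 12:59")
    # every 15 minutes -> 12:00, 12:15, 12:30, 12:45
    for dt in CronValidator.get_execution_time("*/15 * * * *", from_dt=start, to_dt=end):
        print(dt)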
| 36.316092
| 132
| 0.534578
| 858
| 6,319
| 3.642191
| 0.06993
| 0.0992
| 0.17888
| 0.1824
| 0.92768
| 0.92544
| 0.90496
| 0.8752
| 0.85312
| 0.81088
| 0
| 0.055577
| 0.191328
| 6,319
| 173
| 133
| 36.526012
| 0.555969
| 0
| 0
| 0.732394
| 1
| 0
| 0.223611
| 0.15034
| 0
| 0
| 0
| 0
| 0.176056
| 1
| 0.042254
| false
| 0
| 0.014085
| 0
| 0.056338
| 0.267606
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e6fbfda4e3bf38ae1718006466dd0520a7e2541e
| 4,891
|
py
|
Python
|
1_foodsByNutrients/mongo_models/model_malesRDI.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
1_foodsByNutrients/mongo_models/model_malesRDI.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
1_foodsByNutrients/mongo_models/model_malesRDI.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
"""
infant, child, male, female, preg, lactation for all plus macroUpper
so 6 groups, 2 classes for each (RDI, upper limits) plus this table of
Acceptable MacroNutrient Distribution Ranges for children 1-3, 4-18 and adults
"""
from flask import current_app as app
from .model_nutrients import NutrientsDocument
# Just a shorthand
#db_mongo = app.db_mongo # MongoEngine(cnf) in main.py
from .db_setup import db_mongo
class MalesElementsRDI(NutrientsDocument):
"""
elements, vitamins, macro
"""
meta = {
'collection': 'MalesElementsRDI'
}
# mg/d = m, ug/d = u, g/d = g
life_stage_grp = db_mongo.StringField(default="")
calcium = db_mongo.StringField(default='') # m
chromium = db_mongo.StringField(default='') # u
copper = db_mongo.StringField(default="") # u
fluoride = db_mongo.StringField(default='') # m
iodine = db_mongo.StringField(default='') # u
iron = db_mongo.StringField(default='') # m
magnesium = db_mongo.StringField(default='') # m
manganese = db_mongo.StringField(default='') # u
molybdenum = db_mongo.StringField(default="") # u
phosphorus = db_mongo.StringField(default='') # m
selenium = db_mongo.StringField(default='') # u
zinc = db_mongo.StringField(default='') # m
potassium = db_mongo.StringField(default='') # m
sodium = db_mongo.StringField(default='') # m
chloride = db_mongo.StringField(default='') # g
class MalesVitaminsRDI(NutrientsDocument):
"""
elements, vitamins, macro
"""
meta = {
'collection': 'MalesVitaminsRDI'
}
life_stage_grp = db_mongo.StringField(default="")
# mg/d = m, ug/d = u, g/d = g
vitaminA = db_mongo.StringField(default='') # u
vitaminC = db_mongo.StringField(default='') # m
vitaminD = db_mongo.StringField(default="") # u
vitaminE = db_mongo.StringField(default='') # m
vitaminK = db_mongo.StringField(default='') # u
thiamin = db_mongo.StringField(default='') # m
riboflavin = db_mongo.StringField(default='') # m
niacin = db_mongo.StringField(default='') # m
vitaminB6 = db_mongo.StringField(default="") # m
folate = db_mongo.StringField(default='') # u
vitaminB12 = db_mongo.StringField(default='') # u
pantothenicAcid = db_mongo.StringField(default='') # m
biotin = db_mongo.StringField(default='') # m
choline = db_mongo.StringField(default='') # m
class MalesMacroRDI(NutrientsDocument):
meta = {
'collection': 'MalesMacroRDI'
}
life_stage_grp = db_mongo.StringField(default="")
total_water = db_mongo.StringField()
carbs = db_mongo.StringField()
total_fiber = db_mongo.StringField()
fat = db_mongo.StringField()
linoleicAcid = db_mongo.StringField()
alphaLinolenicAcid = db_mongo.StringField()
protein = db_mongo.StringField()
class MalesElementsUpperRDI(NutrientsDocument):
"""
elements, vitamins, macro
"""
meta = {
'collection': 'MalesElementsUpperRDI'
}
# mg/d = m, ug/d = u, g/d = g
life_stage_grp = db_mongo.StringField(default="")
calcium = db_mongo.StringField(default='') # m
chromium = db_mongo.StringField(default='') # u
copper = db_mongo.StringField(default="") # u
fluoride = db_mongo.StringField(default='') # m
iodine = db_mongo.StringField(default='') # u
iron = db_mongo.StringField(default='') # m
magnesium = db_mongo.StringField(default='') # m
manganese = db_mongo.StringField(default='') # u
molybdenum = db_mongo.StringField(default="") # u
phosphorus = db_mongo.StringField(default='') # m
selenium = db_mongo.StringField(default='') # u
zinc = db_mongo.StringField(default='') # m
potassium = db_mongo.StringField(default='') # m
sodium = db_mongo.StringField(default='') # m
chloride = db_mongo.StringField(default='') # g
class MalesVitaminsUpperRDI(NutrientsDocument):
"""
elements, vitamins, macro
"""
meta = {
'collection': 'MalesVitaminsUpperRDI'
}
# mg/d = m, ug/d = u, g/d = g
life_stage_grp = db_mongo.StringField(default="")
vitaminA = db_mongo.StringField(default='') # u
vitaminC = db_mongo.StringField(default='') # m
vitaminD = db_mongo.StringField(default="") # u
vitaminE = db_mongo.StringField(default='') # m
vitaminK = db_mongo.StringField(default='') # u
thiamin = db_mongo.StringField(default='') # m
riboflavin = db_mongo.StringField(default='') # m
niacin = db_mongo.StringField(default='') # m
vitaminB6 = db_mongo.StringField(default="") # m
folate = db_mongo.StringField(default='') # u
vitaminB12 = db_mongo.StringField(default='') # u
pantothenicAcid = db_mongo.StringField(default='') # m
biotin = db_mongo.StringField(default='') # m
choline = db_mongo.StringField(default='') # m
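# Hedged usage sketch (added): flask-mongoengine documents expose the standard
# MongoEngine queryset API, so a lookup would look like the commented lines
# below. The life-stage label "19-30 y" is a hypothetical example value.
# rdi = MalesElementsRDI.objects(life_stage_grp="19-30 y").first()
# if rdi is not None:
#     print(rdi.calcium, rdi.iron)  # stored as strings, per the fields above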
| 37.335878
| 78
| 0.660601
| 558
| 4,891
| 5.632616
| 0.200717
| 0.162584
| 0.400891
| 0.501114
| 0.767102
| 0.767102
| 0.700923
| 0.677378
| 0.677378
| 0.673878
| 0
| 0.003326
| 0.200777
| 4,891
| 130
| 79
| 37.623077
| 0.800716
| 0.126968
| 0
| 0.731183
| 0
| 0
| 0.033124
| 0.010155
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.892473
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
fc2ca2a337f289856be4a660bd9546b69665d05b
| 9,843
|
py
|
Python
|
tests/test_hercprand.py
|
Kefan-pauline/HER-CPRAND
|
131a284a486ecc34baa7d1d766836ab7dda12087
|
[
"MIT"
] | null | null | null |
tests/test_hercprand.py
|
Kefan-pauline/HER-CPRAND
|
131a284a486ecc34baa7d1d766836ab7dda12087
|
[
"MIT"
] | null | null | null |
tests/test_hercprand.py
|
Kefan-pauline/HER-CPRAND
|
131a284a486ecc34baa7d1d766836ab7dda12087
|
[
"MIT"
] | null | null | null |
from src._hercprand import her_CPRAND1,her_CPRAND2,her_CPRAND,her_CPRAND4,her_CPRAND5,her_CPRAND3
import numpy as np
import tensorly as tl
import matplotlib.pyplot as plt
import copy
from src._base import init_factors,random_init_fac
from src._herals import her_Als
def test_hercprand():
"""
Run herCPRAND1 2 3 4 5 for the simple and complicated case, plot exact/estimated error.
Print restart percentage.
Compare running time with herCPRAND for complicated case.
"""
I=50
J=50
K=50
r=10 # rank
n_samples=int(10*r*np.log(r)+1) # nb of randomized samples
fac_true,noise=init_factors(I,J,K,r,True)
t=tl.cp_to_tensor((None,fac_true))+noise
factors=random_init_fac(t,r)
weights1,factors1,it1,error1,error_es1,cpt1,time1=her_CPRAND1(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("1 Complicated case pct restart",cpt1)
print("1 Complicated case time err_rand",np.cumsum(time1)[len(time1)-1])
weights2,factors2,it2,error2,error_es2,cpt2,time2=her_CPRAND2(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("2 Complicated case pct restart",cpt2)
print("2 Complicated case time err_rand",np.cumsum(time2)[len(time2)-1])
weights3,factors3,it3,error3,error_es3,cpt3,time3=her_CPRAND3(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("3 Complicated case pct restart",cpt3)
print("3 Complicated case time err_rand",np.cumsum(time3)[len(time3)-1])
weights4,factors4,it4,error4,error_es4,cpt4,time4=her_CPRAND4(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("4 Complicated case pct restart",cpt4)
print("4 Complicated case time err_rand",np.cumsum(time4)[len(time4)-1])
weights5,factors5,it5,error5,error_es5,cpt5,time5=her_CPRAND5(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("5 Complicated case pct restart",cpt5)
print("5 Complicated case time err_rand",np.cumsum(time5)[len(time5)-1])
plt.figure(0)
plt.plot(range(len(error1)),error1,'b-',label="exact")
plt.plot(range(len(error_es1)),error_es1,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand1 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(1)
plt.plot(range(len(error2)),error2,'b-',label="exact")
plt.plot(range(len(error_es2)),error_es2,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand2 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(2)
plt.plot(range(len(error3)),error3,'b-',label="exact")
plt.plot(range(len(error_es3)),error_es3,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand3 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(3)
plt.plot(range(len(error4)),error4,'b-',label="exact")
plt.plot(range(len(error_es4)),error_es4,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand4 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(4)
plt.plot(range(len(error5)),error5,'b-',label="exact")
plt.plot(range(len(error_es5)),error_es5,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand5 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
fac_true,noise=init_factors(I,J,K,r,False)
t=tl.cp_to_tensor((None,fac_true))+noise
factors=random_init_fac(t,r)
weights1,factors1,it1,error1,error_es1,cpt1,time1=her_CPRAND1(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("Simple case pct restart",cpt1)
weights2,factors2,it2,error2,error_es2,cpt2,time2=her_CPRAND2(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("Simple case pct restart",cpt2)
weights3,factors3,it3,error3,error_es3,cpt3,time3=her_CPRAND(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("Simple case pct restart",cpt3)
weights4,factors4,it4,error4,error_es4,cpt4,time4=her_CPRAND4(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("Simple case pct restart",cpt4)
weights5,factors5,it5,error5,error_es5,cpt5,time5=her_CPRAND5(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("Simple case pct restart",cpt5)
plt.figure(5)
plt.plot(range(len(error1)),error1,'b-',label="exact")
plt.plot(range(len(error_es1)),error_es1,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand1 for simple case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(6)
plt.plot(range(len(error2)),error2,'b-',label="exact")
plt.plot(range(len(error_es2)),error_es2,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand2 for simple case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(7)
plt.plot(range(len(error3)),error3,'b-',label="exact")
plt.plot(range(len(error_es3)),error_es3,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand3 for simple case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(8)
plt.plot(range(len(error4)),error4,'b-',label="exact")
plt.plot(range(len(error_es4)),error_es4,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand4 for simple case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
plt.figure(9)
plt.plot(range(len(error5)),error5,'b-',label="exact")
plt.plot(range(len(error_es5)),error_es5,'r--',label="err rand")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand5 for simple case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
def test_hercprand_35():
"""
Test herCPRAND 3 and 5 on the complicated case, with her_Als as a baseline.
"""
I=50
J=50
K=50
r=10 # rank
n_samples=int(10*r*np.log(r)+1) # nb of randomized samples
fac_true,noise=init_factors(I,J,K,r,True)
t=tl.cp_to_tensor((None,fac_true))+noise
factors=random_init_fac(t,r)
weights1,factors1,it1,error1,cpt1,time1=her_Als(t,r,factors=copy.deepcopy(factors),it_max=200,time_rec=True)
print("her als Complicated case pct restart",cpt1)
weights3,factors3,it3,error3,error_es3,cpt3,time3=her_CPRAND(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("3 Complicated case pct restart",cpt3)
print("3 Complicated case time err_rand",np.cumsum(time3)[len(time3)-1])
print("3 min error", np.min(error3))
print("3 min error es", np.min(error_es3))
weights5,factors5,it5,error5,error_es5,cpt5,time5=her_CPRAND5(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("5 Complicated case pct restart",cpt5)
print("5 Complicated case time err_rand",np.cumsum(time5)[len(time5)-1])
print("5 min error", np.min(error5))
print("5 min error es", np.min(error_es5))
plt.figure(0)
plt.plot(range(len(error3)),error3,'b-',label="exact 3")
plt.plot(range(len(error_es3)),error_es3,'r--',label="err rand 3")
plt.plot(range(len(error5)),error5,'g-',label="exact 5")
plt.plot(range(len(error_es5)),error_es5,'k--',label="err rand 5")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand3 5 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
def test_hercprand_13():
"""
Test herCPRAND 1 and 3 on the complicated case, with her_Als as a baseline.
"""
I=50
J=50
K=50
r=10 # rank
n_samples=int(10*r*np.log(r)+1) # nb of randomized samples
fac_true,noise=init_factors(I,J,K,r,True)
t=tl.cp_to_tensor((None,fac_true))+noise
factors=random_init_fac(t,r)
weights1,factors1,it1,error1,cpt1,time1=her_Als(t,r,factors=copy.deepcopy(factors),it_max=200,time_rec=True)
print("her als Complicated case pct restart",cpt1)
weights3,factors3,it3,error3,error_es3,cpt3,time3=her_CPRAND3(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("3 Complicated case pct restart",cpt3)
print("3 Complicated case time err_rand",np.cumsum(time3)[len(time3)-1])
print("3 min error", np.min(error3))
print("3 min error es", np.min(error_es3))
weights5,factors5,it5,error5,error_es5,cpt5,time5=her_CPRAND1(t,r,n_samples,factors=copy.deepcopy(factors),exact_err=True,it_max=200,err_it_max=100,time_rec=True)
print("5 Complicated case pct restart",cpt5)
print("5 Complicated case time err_rand",np.cumsum(time5)[len(time5)-1])
print("5 min error", np.min(error5))
print("5 min error es", np.min(error_es5))
plt.figure(0)
plt.plot(range(len(error3)),error3,'b-',label="exact 3")
plt.plot(range(len(error_es3)),error_es3,'r--',label="err rand 3")
plt.plot(range(len(error5)),error5,'g-',label="exact 1")
plt.plot(range(len(error_es5)),error_es5,'k--',label="err rand 1")
plt.xlabel('it')
plt.yscale('log')
plt.title('hercprand 1 3 for complicated case')
plt.ylabel('termination criterion')
plt.legend(loc='best')
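# Added refactoring sketch: each figure block above repeats the same six plt
# calls; a small helper collapses one repetition to a single call with
# identical behavior.
def plot_errors(fig_no, error, error_es, title, exact_label="exact", rand_label="err rand"):
    plt.figure(fig_no)
    plt.plot(range(len(error)), error, 'b-', label=exact_label)
    plt.plot(range(len(error_es)), error_es, 'r--', label=rand_label)
    plt.xlabel('it')
    plt.yscale('log')
    plt.title(title)
    plt.ylabel('termination criterion')
    plt.legend(loc='best')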
| 43.941964
| 166
| 0.703241
| 1,609
| 9,843
| 4.172778
| 0.091983
| 0.069258
| 0.050045
| 0.062556
| 0.919124
| 0.91406
| 0.910783
| 0.894102
| 0.889485
| 0.879357
| 0
| 0.050399
| 0.133191
| 9,843
| 223
| 167
| 44.139013
| 0.736521
| 0.034746
| 0
| 0.741935
| 0
| 0
| 0.193326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016129
| false
| 0
| 0.037634
| 0
| 0.053763
| 0.177419
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc7a12821a9923285f17c0b25d190bf914850aaf
| 1,873
|
py
|
Python
|
tests/test_uu_events/test_nas_emm_detach_request.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
tests/test_uu_events/test_nas_emm_detach_request.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
tests/test_uu_events/test_nas_emm_detach_request.py
|
matan1008/srsran-controller
|
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
|
[
"MIT"
] | null | null | null |
import datetime
from pyshark import FileCapture
from srsran_controller.uu_events.factory import EventsFactory
DETACH_REQUEST_PCAP_DATA = (
'd4c3b2a1020004000000000000000000ffff0000950000001dcb8b604daf03003702000037020000beefdead023700006d61632d6c746501'
'0003020047030000042be807010a000f00013d3a221f1f0935a000004802a2eb378a386060e8a1217ec01e220000234382325ce091cc210a'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
)
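# Note: the hex blob above is a complete little-endian pcap file ('d4c3b2a1' is the
# pcap magic number), so writing it out with bytes.fromhex() yields a capture file
# that pyshark's FileCapture can open directly, as the test below does.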
def test_parsing_emm_detach_request(tmp_path):
p = tmp_path / 'detach_request.pcap'
p.write_bytes(bytes.fromhex(DETACH_REQUEST_PCAP_DATA))
with FileCapture(str(p)) as pcap:
detach = list(EventsFactory().from_packet(list(pcap)[0]))[0]
assert detach == {
'tmsi': '0x1c1192e7',
'event': 'Detach request',
'rnti': 71,
'time': datetime.datetime(2021, 4, 30, 12, 17, 17, 241485),
}
| 56.757576
| 118
| 0.860117
| 86
| 1,873
| 18.534884
| 0.55814
| 0.983689
| 1.264743
| 1.40527
| 0.562108
| 0.562108
| 0.562108
| 0
| 0
| 0
| 0
| 0.701291
| 0.09023
| 1,873
| 32
| 119
| 58.53125
| 0.234155
| 0
| 0
| 0.296296
| 0
| 0
| 0.680192
| 0.648158
| 0
| 1
| 0.005339
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.111111
| 0
| 0.148148
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fc8242c0303f605782e89b27b8cdbf252d9b298b
| 7,113
|
py
|
Python
|
tests/testflows/rbac/helper/tables.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | 1
|
2022-02-27T15:21:20.000Z
|
2022-02-27T15:21:20.000Z
|
tests/testflows/rbac/helper/tables.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | 16
|
2022-02-14T15:53:29.000Z
|
2022-03-25T18:39:16.000Z
|
tests/testflows/rbac/helper/tables.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | null | null | null |
from collections import namedtuple
table_tuple = namedtuple("table_tuple", "create_statement cluster")
table_types = {
"MergeTree": table_tuple(
"CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8) ENGINE = MergeTree() PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"ReplacingMergeTree": table_tuple(
"CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8) ENGINE = ReplacingMergeTree() PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"SummingMergeTree": table_tuple(
"CREATE TABLE {name} (d DATE, a String, b UInt8 DEFAULT 1, x String, y Int8) ENGINE = SummingMergeTree() PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"AggregatingMergeTree": table_tuple(
"CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8) ENGINE = AggregatingMergeTree() PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"CollapsingMergeTree": table_tuple(
"CREATE TABLE {name} (d Date, a String, b UInt8, x String, y Int8, sign Int8 DEFAULT 1) ENGINE = CollapsingMergeTree(sign) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"VersionedCollapsingMergeTree": table_tuple(
"CREATE TABLE {name} (d Date, a String, b UInt8, x String, y Int8, version UInt64, sign Int8 DEFAULT 1) ENGINE = VersionedCollapsingMergeTree(sign, version) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"GraphiteMergeTree": table_tuple(
"CREATE TABLE {name} (d Date, a String, b UInt8, x String, y Int8, Path String, Time DateTime, Value Float64, col UInt64, Timestamp Int64) ENGINE = GraphiteMergeTree('graphite_rollup_example') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
None,
),
"ReplicatedMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedReplacingMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedReplacingMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedSummingMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d DATE, a String, b UInt8 DEFAULT 1, x String, y Int8) \
ENGINE = ReplicatedSummingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedSummingMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d DATE, a String, b UInt8 DEFAULT 1, x String, y Int8) \
ENGINE = ReplicatedSummingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedAggregatingMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedAggregatingMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d DATE, a String, b UInt8, x String, y Int8) \
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedCollapsingMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d Date, a String, b UInt8, x String, y Int8, sign Int8 DEFAULT 1) \
ENGINE = ReplicatedCollapsingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', sign) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedCollapsingMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d Date, a String, b UInt8, x String, y Int8, sign Int8 DEFAULT 1) \
ENGINE = ReplicatedCollapsingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', sign) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedVersionedCollapsingMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d Date, a String, b UInt8, x String, y Int8, version UInt64, sign Int8 DEFAULT 1) \
ENGINE = ReplicatedVersionedCollapsingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', sign, version) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedVersionedCollapsingMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d Date, a String, b UInt8, x String, y Int8, version UInt64, sign Int8 DEFAULT 1) \
ENGINE = ReplicatedVersionedCollapsingMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', sign, version) PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
"ReplicatedGraphiteMergeTree-sharded_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER sharded_cluster (d Date, a String, b UInt8, x String, y Int8, Path String, Time DateTime, Value Float64, col UInt64, Timestamp Int64) \
ENGINE = ReplicatedGraphiteMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', 'graphite_rollup_example') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"sharded_cluster",
),
"ReplicatedGraphiteMergeTree-one_shard_cluster": table_tuple(
"CREATE TABLE {name} ON CLUSTER one_shard_cluster (d Date, a String, b UInt8, x String, y Int8, Path String, Time DateTime, Value Float64, col UInt64, Timestamp Int64) \
ENGINE = ReplicatedGraphiteMergeTree('/clickhouse/tables/{{shard}}/{name}', '{{replica}}', 'graphite_rollup_example') PARTITION BY y ORDER BY (b, d) PRIMARY KEY b",
"one_shard_cluster",
),
}
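# A minimal usage sketch (hypothetical table name): each create_statement is a format
# string, so .format(name=...) substitutes {name} while the doubled {{shard}} and
# {{replica}} braces collapse to the literal {shard}/{replica} ClickHouse macros.
if __name__ == "__main__":
    statement, cluster = table_types["ReplicatedMergeTree-sharded_cluster"]
    print(statement.format(name="default.demo_table"))
    print(cluster)  # -> sharded_cluster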
| 67.742857
| 247
| 0.679179
| 896
| 7,113
| 5.28683
| 0.071429
| 0.048554
| 0.074309
| 0.093097
| 0.843572
| 0.843572
| 0.843572
| 0.843572
| 0.843572
| 0.843572
| 0
| 0.014193
| 0.197666
| 7,113
| 104
| 248
| 68.394231
| 0.81584
| 0
| 0
| 0.607843
| 0
| 0.343137
| 0.403627
| 0.185716
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009804
| 0
| 0.009804
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d9b8ef94845daefbfe9c87935392495f7694ff3
| 205
|
py
|
Python
|
generated-libraries/python/netapp/interim/cluster_serial_number.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/interim/cluster_serial_number.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/interim/cluster_serial_number.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
class ClusterSerialNumber(basestring):
"""
Assigned Cluster serial-number during cluster create
"""
@staticmethod
def get_api_name():
return "cluster-serial-number"
| 20.5
| 56
| 0.643902
| 19
| 205
| 6.842105
| 0.789474
| 0.2
| 0.292308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 205
| 9
| 57
| 22.777778
| 0.866667
| 0.253659
| 0
| 0
| 0
| 0
| 0.153285
| 0.153285
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5d09162624eba12b5c54dc667ff3231d7ededc1c
| 39
|
py
|
Python
|
basic/emoji.py
|
jspw/Basic_Python
|
aa159f576a471c6deebdf1e5f462dfc9ffb4930b
|
[
"Unlicense"
] | 6
|
2020-06-25T14:52:09.000Z
|
2021-08-05T20:54:15.000Z
|
basic/emoji.py
|
jspw/Basic_Python
|
aa159f576a471c6deebdf1e5f462dfc9ffb4930b
|
[
"Unlicense"
] | null | null | null |
basic/emoji.py
|
jspw/Basic_Python
|
aa159f576a471c6deebdf1e5f462dfc9ffb4930b
|
[
"Unlicense"
] | null | null | null |
print("\U0001F600")
print("\U0001F604")
| 19.5
| 19
| 0.717949
| 4
| 39
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.368421
| 0.025641
| 39
| 2
| 20
| 19.5
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
5d1e0f28d1f68858f4c8647fdd47e34638334024
| 46,257
|
py
|
Python
|
DNN_base.py
|
Blue-Giant/MscaleDNN_tf1
|
973913a633cac4886d52411d4b127bb906d47154
|
[
"MIT"
] | 1
|
2021-12-29T05:00:48.000Z
|
2021-12-29T05:00:48.000Z
|
DNN_base.py
|
Blue-Giant/MscaleDNN_tf1
|
973913a633cac4886d52411d4b127bb906d47154
|
[
"MIT"
] | null | null | null |
DNN_base.py
|
Blue-Giant/MscaleDNN_tf1
|
973913a633cac4886d52411d4b127bb906d47154
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: xi'an Li
Created on 2020.05.31
Modified on 2020.06.17
Modified on 2021.10.15
Modified and formed the final version on 2022.5.15
"""
import tensorflow as tf
import numpy as np
def pairwise_distance(point_set):
"""Compute pairwise distance of a point cloud.
Args:
(x-y)^2 = x^2 - 2xy + y^2
point_set: tensor (num_points, dims2point)
Returns:
pairwise distance: (num_points, num_points)
"""
point_set_shape = point_set.get_shape().as_list()
assert(len(point_set_shape)) == 2
point_set_transpose = tf.transpose(point_set, perm=[1, 0])
point_set_inner = tf.matmul(point_set, point_set_transpose)
point_set_inner = -2 * point_set_inner
point_set_square = tf.reduce_sum(tf.square(point_set), axis=-1, keepdims=True)
point_set_square_transpose = tf.transpose(point_set_square, perm=[1, 0])
return point_set_square + point_set_inner + point_set_square_transpose
def np_pairwise_distance(point_set):
    """Compute pairwise distance of a point cloud.
    Args:
        (x-y)^2 = x^2 - 2xy + y^2
        point_set: numpy (num_points, dims2point)
    Returns:
        pairwise distance: (num_points, num_points)
    """
    # pure-NumPy counterpart of pairwise_distance() above
    assert point_set.ndim == 2
    point_set_inner = -2 * np.matmul(point_set, point_set.T)
    point_set_square = np.sum(np.square(point_set), axis=-1, keepdims=True)
    return point_set_square + point_set_inner + point_set_square.T
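# Quick numeric check (a sketch) of the (x-y)^2 = x^2 - 2xy + y^2 expansion used above,
# compared against an explicit double loop on a tiny point set:
if __name__ == "__main__":
    pts = np.array([[0.0, 0.0], [3.0, 4.0], [1.0, 1.0]], dtype=np.float32)
    brute = np.array([[np.sum((p - q) ** 2) for q in pts] for p in pts])
    assert np.allclose(np_pairwise_distance(pts), brute)  # e.g. entry (0, 1) is 3^2 + 4^2 = 25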
def knn_includeself(dist_matrix, k=20):
"""Get KNN based on the pairwise distance.
How to use tf.nn.top_k(): https://blog.csdn.net/wuguangbin1230/article/details/72820627
Args:
pairwise distance: (num_points, num_points)
k: int
Returns:
nearest neighbors: (num_points, k)
"""
neg_dist = -1.0*dist_matrix
    _, nn_idx = tf.nn.top_k(neg_dist, k=k)  # tf.nn.top_k returns the k largest entries in each row together with their positional indices
return nn_idx
def np_knn_includeself(dist_matrix, k=20):
"""Get KNN based on the pairwise distance.
How to use tf.nn.top_k(): https://blog.csdn.net/wuguangbin1230/article/details/72820627
Args:
pairwise distance: (num_points, num_points)
k: int
Returns:
nearest neighbors: (num_points, k)
"""
    # Nearest neighbors have the smallest distances: np.argpartition(a, kth) puts the
    # kth-smallest entry of each row in sorted position, so the first k columns index
    # the k nearest neighbors (in arbitrary order within the partition).
    nn_idx = np.argpartition(dist_matrix, kth=k, axis=-1)[:, :k]
    return nn_idx
def knn_excludeself(dist_matrix, k=20):
"""Get KNN based on the pairwise distance.
Args:
pairwise distance: (num_points, num_points)
k: int
Returns:
nearest neighbors index: (num_points, k)
"""
neg_dist = -1.0*dist_matrix
k_neighbors = k+1
    _, knn_idx = tf.nn.top_k(neg_dist, k=k_neighbors)  # tf.nn.top_k returns the k largest entries in each row together with their positional indices
nn_idx = knn_idx[:, 1: k_neighbors]
return nn_idx
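# Note on knn_excludeself: with exact pairwise distances, column 0 of tf.nn.top_k's
# result is each point's zero-distance match with itself, so slicing [:, 1:k+1]
# returns k true neighbors, whereas knn_includeself keeps that self column.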
def get_kneighbors_3D_4DTensor(point_set, nn_idx):
"""Construct neighbors feature for each point
Args:
point_set: (batch_size, num_points, 1, dim)
nn_idx: (batch_size, num_points, k)
k: int
Returns:
neighbors features: (batch_size, num_points, k, dim)
"""
og_batch_size = point_set.get_shape().as_list()[0]
og_num_dims = point_set.get_shape().as_list()[-1]
point_set = tf.squeeze(point_set)
if og_batch_size == 1:
point_set = tf.expand_dims(point_set, 0)
if og_num_dims == 1:
point_set = tf.expand_dims(point_set, -1)
point_set_shape = point_set.get_shape()
batch_size = point_set_shape[0].value
num_points = point_set_shape[1].value
num_dims = point_set_shape[2].value
idx_ = tf.range(batch_size) * num_points
idx_ = tf.reshape(idx_, [batch_size, 1, 1])
point_set_flat = tf.reshape(point_set, [-1, num_dims])
point_set_neighbors = tf.gather(point_set_flat, nn_idx + idx_)
return point_set_neighbors
def get_kneighbors_2DTensor(point_set, nn_idx):
"""Construct neighbors feature for each point
Args:
point_set: (num_points, dim)
nn_idx: (num_points, k_num)
num_points: the number of point
k_num: the number of neighbor
Returns:
neighbors features: (num_points, k_num, dim)
"""
shape2point_set = point_set.get_shape().as_list()
assert(len(shape2point_set) == 2)
point_set_neighbors = tf.gather(point_set, nn_idx)
return point_set_neighbors
def cal_attends2neighbors(edge_point_set, dis_model='L1'):
"""
Args:
edge_point_set:(num_points, k_neighbors, dim2point)
dis_model:
return:
atten_ceof: (num_points, 1, k_neighbors)
"""
square_edges = tf.square(edge_point_set) # (num_points, k_neighbors, dim2point)
norm2edges = tf.reduce_sum(square_edges, axis=-1, keepdims=True) # (num_points, k_neighbors)
if str.lower(dis_model) == 'l1':
norm2edges = tf.sqrt(norm2edges)
exp_dis = tf.exp(-norm2edges) # (num_points, k_neighbors)
normalize_exp_dis = tf.nn.softmax(exp_dis, axis=1)
atten_ceof = tf.transpose(normalize_exp_dis, perm=[0, 2, 1]) # (num_points, 1, k_neighbors)
return atten_ceof
def cal_edgesNorm_attends2neighbors(edge_point_set, dis_model='L1'):
"""
Args:
edge_point_set:(num_points, k_neighbors, dim2point)
dis_model:
return:
atten_ceof: (num_points, 1, k_neighbors)
"""
square_edges = tf.square(edge_point_set) # (num_points, k_neighbors, dim2point)
norm2edges = tf.reduce_sum(square_edges, axis=-1, keepdims=True) # (num_points, k_neighbors)
if str.lower(dis_model) == 'l1':
norm2edges = tf.sqrt(norm2edges)
normalize_edgeNrom = tf.nn.softmax(norm2edges, axis=1)
exp_dis = tf.exp(-norm2edges) # (num_points, k_neighbors)
normalize_exp_dis = tf.nn.softmax(exp_dis, axis=1)
atten_ceof = tf.transpose(normalize_exp_dis, perm=[0, 2, 1])
return normalize_edgeNrom, atten_ceof
# ---------------------------------------------- my activations -----------------------------------------------
def linear(x):
return x
def mysin(x):
# return tf.sin(2*np.pi*x)
return tf.sin(x)
# return 0.5*tf.sin(x)
def srelu(x):
return tf.nn.relu(1-x)*tf.nn.relu(x)
def s2relu(x):
return tf.nn.relu(1-x)*tf.nn.relu(x)*tf.sin(2*np.pi*x)
# return 1.5*tf.nn.relu(1-x)*tf.nn.relu(x)*tf.sin(2*np.pi*x)
# return 1.25*tf.nn.relu(1-x)*tf.nn.relu(x)*tf.sin(2*np.pi*x)
def sinAddcos(x):
return 0.5*(tf.sin(x) + tf.cos(x))
# return tf.sin(x) + tf.cos(x)
def sinAddcos_sReLu(x):
return tf.nn.relu(1-x)*tf.nn.relu(x)*(tf.sin(2*np.pi*x) + tf.cos(2*np.pi*x))
def s3relu(x):
# return 0.5*tf.nn.relu(1-x)*tf.nn.relu(1+x)*tf.sin(2*np.pi*x)
# return 0.21*tf.nn.relu(1-x)*tf.nn.relu(1+x)*tf.sin(2*np.pi*x)
    # return tf.nn.relu(1 - x) * tf.nn.relu(x) * (tf.sin(2 * np.pi * x) + tf.cos(2 * np.pi * x))  # (does not work well)
    # return tf.nn.relu(1 - x) * tf.nn.relu(1 + x) * (tf.sin(2 * np.pi * x) + tf.cos(2 * np.pi * x))  # (does not work)
    return tf.nn.relu(1-tf.abs(x))*tf.nn.relu(tf.abs(x))*tf.sin(2*np.pi*tf.abs(x))  # works, but not as well as s2relu
    # return tf.nn.relu(1-tf.abs(x))*tf.nn.relu(tf.abs(x))*tf.sin(2*np.pi*x)  # works, but not as well as s2relu
# return 1.5*tf.nn.relu(1-tf.abs(x))*tf.nn.relu(tf.abs(x))*tf.sin(np.pi*x)
# return tf.nn.relu(1 - x) * tf.nn.relu(x+0.5) * tf.sin(2 * np.pi * x)
def csrelu(x):
# return tf.nn.relu(1-x)*tf.nn.relu(x)*tf.cos(np.pi*x)
return 1.5*tf.nn.relu(1 - x) * tf.nn.relu(x) * tf.cos(np.pi * x)
# return tf.nn.relu(1-tf.abs(x))*tf.nn.relu(tf.abs(x))*tf.cos(np.pi*x)
def stanh(x):
return tf.tanh(x)*tf.sin(2*np.pi*x)
def gauss(x):
return tf.exp(-1.0 * x * x)
# return 0.2*tf.exp(-4*x*x)
# return 0.25*tf.exp(-7.5*(x-0.5)*(x-0.5))
def mexican(x):
return (1-x*x)*tf.exp(-0.5*x*x)
def modify_mexican(x):
# return 1.25*x*tf.exp(-0.25*x*x)
# return x * tf.exp(-0.125 * x * x)
return x * tf.exp(-0.075*x * x)
# return -1.25*x*tf.exp(-0.25*x*x)
def sm_mexican(x):
# return tf.sin(np.pi*x) * x * tf.exp(-0.075*x * x)
# return tf.sin(np.pi*x) * x * tf.exp(-0.125*x * x)
return 2.0*tf.sin(np.pi*x) * x * tf.exp(-0.5*x * x)
def singauss(x):
# return 0.6 * tf.exp(-4 * x * x) * tf.sin(np.pi * x)
# return 0.6 * tf.exp(-5 * x * x) * tf.sin(np.pi * x)
# return 0.75*tf.exp(-5*x*x)*tf.sin(2*np.pi*x)
# return tf.exp(-(x-0.5) * (x - 0.5)) * tf.sin(np.pi * x)
# return 0.25 * tf.exp(-3.5 * x * x) * tf.sin(2 * np.pi * x)
# return 0.225*tf.exp(-2.5 * (x - 0.5) * (x - 0.5)) * tf.sin(2*np.pi * x)
return 0.225 * tf.exp(-2 * (x - 0.5) * (x - 0.5)) * tf.sin(2 * np.pi * x)
# return 0.4 * tf.exp(-10 * (x - 0.5) * (x - 0.5)) * tf.sin(2 * np.pi * x)
# return 0.45 * tf.exp(-5 * (x - 1.0) * (x - 1.0)) * tf.sin(np.pi * x)
# return 0.3 * tf.exp(-5 * (x - 1.0) * (x - 1.0)) * tf.sin(2 * np.pi * x)
# return tf.sin(2*np.pi*tf.exp(-0.5*x*x))
def powsin_srelu(x):
return tf.nn.relu(1-x)*tf.nn.relu(x)*tf.sin(2*np.pi*x)*tf.sin(2*np.pi*x)
def sin2_srelu(x):
return 2.0*tf.nn.relu(1-x)*tf.nn.relu(x)*tf.sin(4*np.pi*x)*tf.sin(2*np.pi*x)
def slrelu(x):
return tf.nn.leaky_relu(1-x)*tf.nn.leaky_relu(x)
def pow2relu(x):
return tf.nn.relu(1-x)*tf.nn.relu(x)*tf.nn.relu(x)
def selu(x):
return tf.nn.elu(1-x)*tf.nn.elu(x)
def wave(x):
return tf.nn.relu(x) - 2*tf.nn.relu(x-1/4) + \
2*tf.nn.relu(x-3/4) - tf.nn.relu(x-1)
def phi(x):
return tf.nn.relu(x) * tf.nn.relu(x)-3*tf.nn.relu(x-1)*tf.nn.relu(x-1) + 3*tf.nn.relu(x-2)*tf.nn.relu(x-2) \
- tf.nn.relu(x-3)*tf.nn.relu(x-3)*tf.nn.relu(x-3)
def gelu(x):
out = x*tf.exp(x)/(1+tf.exp(x))
return out
def mgelu(x):
temp2x = np.sqrt(2 / np.pi) * (x + 0.044715 * x * x * x)
# out = 0.5*+ 0.5*x*tf.tanh(temp2x)
out = 0.25 * x * tf.tanh(temp2x)
return out
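# Quick sanity sketch for the compactly supported activations above: srelu and s2relu
# vanish outside [0, 1], e.g. s2relu(0.25) = 0.75 * 0.25 * sin(pi/2) = 0.1875:
# with tf.compat.v1.Session() as sess:
#     xs = tf.constant([-0.5, 0.25, 0.5, 1.5])
#     print(sess.run(s2relu(xs)))  # ~[0., 0.1875, 0., 0.]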
# ------------------------------------------------ initialize the weights and biases --------------------------------------------
# Generate the weights and biases of a DNN
# tf.random_normal(): draws random values from the specified normal distribution
# tf.random_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)
# shape: shape of the output tensor, required. --- mean: mean of the normal distribution, default 0. ---- stddev: standard deviation, default 1.0
# dtype: output type, default tf.float32 ---- seed: integer random seed; once set, the same random numbers are generated every run --- name: name of the operation
def Generally_Init_NN(in_size, out_size, hidden_layers, Flag='flag'):
n_hiddens = len(hidden_layers)
    Weights = []  # list storing the weights of the hidden layers
    Biases = []  # list storing the biases of the hidden layers
    # Hidden layer: weights and bias of the first layer, which transforms the input data
W = tf.compat.v1.Variable(0.1 * tf.random.normal([in_size, hidden_layers[0]]), dtype='float32',
name='W_transInput' + str(Flag))
B = tf.compat.v1.Variable(0.1 * tf.random.uniform([1, hidden_layers[0]]), dtype='float32',
name='B_transInput' + str(Flag))
Weights.append(W)
Biases.append(B)
    # Hidden layers: weights and biases from the second layer to the second-to-last layer
for i_layer in range(n_hiddens - 1):
W = tf.compat.v1.Variable(0.1 * tf.random.normal([hidden_layers[i_layer], hidden_layers[i_layer+1]]),
dtype='float32', name='W_hidden' + str(i_layer + 1) + str(Flag))
B = tf.compat.v1.Variable(0.1 * tf.random.uniform([1, hidden_layers[i_layer+1]]), dtype='float32',
name='B_hidden' + str(i_layer + 1) + str(Flag))
Weights.append(W)
Biases.append(B)
    # Output layer: weights and bias of the last layer, mapping the final result to the output dimension
W = tf.compat.v1.Variable(0.1 * tf.random.normal([hidden_layers[-1], out_size]), dtype='float32',
name='W_outTrans' + str(Flag))
B = tf.compat.v1.Variable(0.1 * tf.random.uniform([1, out_size]), dtype='float32',
name='B_outTrans' + str(Flag))
Weights.append(W)
Biases.append(B)
return Weights, Biases
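# A minimal usage sketch (illustrative sizes): build parameters for a
# 2 -> [20, 20, 10] -> 1 network; the lists hold one entry per hidden layer
# plus one for the output transform:
# Weights, Biases = Generally_Init_NN(2, 1, [20, 20, 10], Flag='demo')
# assert len(Weights) == len(Biases) == 4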
# tf.truncated_normal(shape, mean, stddev): shape is the shape of the generated tensor, mean the mean
# and stddev the standard deviation. The function samples a normal distribution with the given mean and
# standard deviation, but truncated: any draw that deviates from the mean by more than two standard
# deviations is discarded and re-drawn. Unlike an ordinary normal sampler, the values it produces
# therefore never differ from the mean by more than two standard deviations.
# truncated_normal(
#                   shape,
#                   mean=0.0,
#                   stddev=1.0,
#                   dtype=tf.float32,
#                   seed=None,
#                   name=None)
def truncated_normal_init(in_dim, out_dim, scale_coef=1.0, weight_name='weight'):
xavier_stddev = np.sqrt(2/(in_dim + out_dim))
    # the scale factor keeps the initial values from being too small or too large
V = tf.compat.v1.Variable(scale_coef*tf.truncated_normal([in_dim, out_dim], stddev=xavier_stddev), dtype=tf.float32,
name=weight_name)
return V
# tf.random_uniform()
# By default it draws random numbers in [0, 1); minval and maxval can set other bounds
def uniform_init(in_dim, out_dim, weight_name='weight'):
V = tf.compat.v1.Variable(tf.random_uniform([in_dim, out_dim], dtype=tf.float32), dtype=tf.float32,
name=weight_name)
return V
# tf.random_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)
# Draws random values from a normal distribution.
# Parameters:
#     shape: a 1-D tensor that is also the shape of the output tensor.
#     mean: mean of the normal distribution.
#     stddev: standard deviation of the normal distribution.
#     dtype: type of the output.
#     seed: an integer; once set, the same random numbers are generated every run.
#     name: name of the operation.
def normal_init(in_dim, out_dim, scale_coef=1.0, weight_name='weight'):
stddev2normal = np.sqrt(2.0/(in_dim + out_dim))
    # the scale factor keeps the initial values from being too small or too large
V = tf.compat.v1.Variable(scale_coef*tf.random_normal([in_dim, out_dim], mean=0, stddev=stddev2normal,
dtype=tf.float32), dtype=tf.float32, name=weight_name)
return V
def Truncated_normal_init_NN(in_size, out_size, hidden_layers, Flag='flag'):
with tf.compat.v1.variable_scope('WB_scope', reuse=tf.compat.v1.AUTO_REUSE):
scale = 5.0
n_hiddens = len(hidden_layers)
        Weights = []  # list storing the weights of the hidden layers
        Biases = []  # list storing the biases of the hidden layers
        # Hidden layer: weights and bias of the first layer, which transforms the input data
W = truncated_normal_init(in_size, hidden_layers[0], scale_coef=scale, weight_name='W-transInput' + str(Flag))
B = uniform_init(1, hidden_layers[0], weight_name='B-transInput' + str(Flag))
Weights.append(W)
Biases.append(B)
for i_layer in range(0, n_hiddens - 1):
W = truncated_normal_init(hidden_layers[i_layer], hidden_layers[i_layer + 1], scale_coef=scale,
weight_name='W-hidden' + str(i_layer + 1) + str(Flag))
B = uniform_init(1, hidden_layers[i_layer + 1], weight_name='B-hidden' + str(i_layer + 1) + str(Flag))
Weights.append(W)
Biases.append(B)
        # Output layer: weights and bias of the last layer, mapping the final result to the output dimension
W = truncated_normal_init(hidden_layers[-1], out_size, scale_coef=scale, weight_name='W-outTrans' + str(Flag))
B = uniform_init(1, out_size, weight_name='B-outTrans' + str(Flag))
Weights.append(W)
Biases.append(B)
return Weights, Biases
def Xavier_init_NN(in_size, out_size, hidden_layers, Flag='flag', varcoe=0.5):
with tf.compat.v1.variable_scope('WB_scope', reuse=tf.compat.v1.AUTO_REUSE):
n_hiddens = len(hidden_layers)
        Weights = []  # list storing the weights of the hidden layers
        Biases = []  # list storing the biases of the hidden layers
        # Hidden layer: weights and bias of the first layer, which transforms the input data
stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
W = tf.compat.v1.get_variable(name='W-transInput' + str(Flag), shape=(in_size, hidden_layers[0]),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B-transInput' + str(Flag), shape=(hidden_layers[0],),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
Weights.append(W)
Biases.append(B)
for i_layer in range(0, n_hiddens - 1):
stddev_WB = (2.0 / (hidden_layers[i_layer] + hidden_layers[i_layer + 1])) ** varcoe
W = tf.compat.v1.get_variable(
name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
        # Output layer: weights and bias of the last layer, mapping the final result to the output dimension
stddev_WB = (2.0 / (hidden_layers[-1] + out_size)) ** varcoe
W = tf.compat.v1.get_variable(name='W-outTrans' + str(Flag), shape=(hidden_layers[-1], out_size),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B-outTrans' + str(Flag), shape=(out_size,),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
return Weights, Biases
def Xavier_init_NN_Fourier(in_size, out_size, hidden_layers, Flag='flag', varcoe=0.5):
with tf.compat.v1.variable_scope('WB_scope', reuse=tf.compat.v1.AUTO_REUSE):
n_hiddens = len(hidden_layers)
        Weights = []  # list storing the weights of the hidden layers
        Biases = []  # list storing the biases of the hidden layers
        # Hidden layer: weights and bias of the first layer, which transforms the input data
stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
W = tf.compat.v1.get_variable(name='W-transInput' + str(Flag), shape=(in_size, hidden_layers[0]),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B-transInput' + str(Flag), shape=(hidden_layers[0],),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
Weights.append(W)
Biases.append(B)
for i_layer in range(0, n_hiddens - 1):
stddev_WB = (2.0 / (hidden_layers[i_layer] + hidden_layers[i_layer + 1])) ** varcoe
if 0 == i_layer:
W = tf.compat.v1.get_variable(
name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer]*2, hidden_layers[i_layer + 1]),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
else:
W = tf.compat.v1.get_variable(
name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
        # Output layer: weights and bias of the last layer, mapping the final result to the output dimension
stddev_WB = (2.0 / (hidden_layers[-1] + out_size)) ** varcoe
W = tf.compat.v1.get_variable(name='W-outTrans' + str(Flag), shape=(hidden_layers[-1], out_size),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B-outTrans' + str(Flag), shape=(out_size,),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
return Weights, Biases
def Xavier_init_NN_RBF(in_size, out_size, hidden_layers, Flag='flag', varcoe=0.5, opt2init_B2RBF='uniform_random',
opt2init_W2RBF='uniform_random', train_W2RBF=True, train_B2RBF=True, left_value=0.0,
right_value=1.0, shuffle_W2RBF=True, shuffle_B2RBF=True, value_max2weight=1.0):
with tf.compat.v1.variable_scope('WB_scope', reuse=tf.compat.v1.AUTO_REUSE):
n_hiddens = len(hidden_layers)
        Weights = []  # list storing the weights of the hidden layers
        Biases = []  # list storing the biases of the hidden layers
        # Hidden layer: weights and bias of the first layer, which transforms the input data
stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
if opt2init_W2RBF == 'uniform_random':
W2RBF = tf.compat.v1.get_variable(name='W2RBF' + str(Flag),
initializer=tf.random.uniform([1, hidden_layers[0]],
maxval=value_max2weight),
dtype=tf.float32, trainable=train_W2RBF)
if shuffle_W2RBF:
                W2RBF = tf.random_shuffle(W2RBF)  # random_shuffle has no gradient defined, so W2RBF cannot be differentiated through after shuffling
else:
W2RBF = tf.compat.v1.get_variable(name='W2RBF' + str(Flag), shape=(1, hidden_layers[0]),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32, trainable=train_W2RBF)
if opt2init_B2RBF == 'uniform_random':
B2RBF = tf.compat.v1.get_variable(name='B2RBF' + str(Flag),
initializer=tf.random.uniform([in_size, hidden_layers[0]],
minval=left_value, maxval=right_value),
dtype=tf.float32, trainable=train_B2RBF)
if shuffle_B2RBF:
B2RBF = tf.random_shuffle(B2RBF)
else:
B2RBF = tf.compat.v1.get_variable(name='B2RBF' + str(Flag), shape=(in_size, hidden_layers[0]),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32, trainable=train_B2RBF)
Weights.append(W2RBF)
Biases.append(B2RBF)
for i_layer in range(0, n_hiddens - 1):
stddev_WB = (2.0 / (hidden_layers[i_layer] + hidden_layers[i_layer + 1])) ** varcoe
if 0 == i_layer:
W = tf.compat.v1.get_variable(
name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
else:
W = tf.compat.v1.get_variable(
name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
        # Output layer: weights and bias of the last layer, mapping the final result to the output dimension
stddev_WB = (2.0 / (hidden_layers[-1] + out_size)) ** varcoe
W = tf.compat.v1.get_variable(name='W-outTrans' + str(Flag), shape=(hidden_layers[-1], out_size),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
B = tf.compat.v1.get_variable(name='B-outTrans' + str(Flag), shape=(out_size,),
initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
Weights.append(W)
Biases.append(B)
return Weights, Biases
# ----------------------------------- regularization -----------------------------------------------
def regular_weights_biases_L1(weights, biases):
    # L1 regularization of the weights and biases
layers = len(weights)
regular_w = 0
regular_b = 0
for i_layer1 in range(layers):
regular_w = regular_w + tf.reduce_sum(tf.abs(weights[i_layer1]), keep_dims=False)
regular_b = regular_b + tf.reduce_sum(tf.abs(biases[i_layer1]), keep_dims=False)
return regular_w + regular_b
# L2 regularization of the weights and biases
def regular_weights_biases_L2(weights, biases):
layers = len(weights)
regular_w = 0
regular_b = 0
for i_layer1 in range(layers):
regular_w = regular_w + tf.reduce_sum(tf.square(weights[i_layer1]), keep_dims=False)
regular_b = regular_b + tf.reduce_sum(tf.square(biases[i_layer1]), keep_dims=False)
return regular_w + regular_b
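# A minimal usage sketch: add an L2 penalty to a task loss (assumption: mse_loss,
# Weights and Biases already exist in the training graph; 1e-4 is illustrative):
# penalty = regular_weights_biases_L2(Weights, Biases)
# total_loss = mse_loss + 1e-4 * penalty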
# -------------------------------------------- network models ------------------------------------------------------
def DNN(variable_input, Weights, Biases, hiddens, activateIn_name='tanh', activate_name='tanh', activateOut_name='linear'):
"""
Args:
variable_input: the input data, dim:NxD
Weights: the weight for each hidden layer
Biases: the bias for each hidden layer
hiddens: a list or tuple for hidden-layer, it contains the num of neural units
activateIn_name: the name of activation function for input-layer
activate_name: the name of activation function for hidden-layer
activateOut_name: the name of activation function for output-layer
return:
output data, dim:NxD', generally D'=1
"""
if str.lower(activateIn_name) == 'relu':
act_in = tf.nn.relu
elif str.lower(activateIn_name) == 'leaky_relu':
        act_in = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_in stays callable
elif str.lower(activateIn_name) == 'srelu':
act_in = srelu
elif str.lower(activateIn_name) == 's2relu':
act_in = s2relu
elif str.lower(activateIn_name) == 'elu':
act_in = tf.nn.elu
elif str.lower(activateIn_name) == 'sin':
act_in = mysin
elif str.lower(activateIn_name) == 'sinaddcos':
act_in = sinAddcos
elif str.lower(activateIn_name) == 'tanh':
act_in = tf.tanh
elif str.lower(activateIn_name) == 'gauss':
act_in = gauss
elif str.lower(activateIn_name) == 'softplus':
act_in = tf.nn.softplus
elif str.lower(activateIn_name) == 'sigmoid':
act_in = tf.nn.sigmoid
elif str.lower(activateIn_name) == 'gelu':
act_in = gelu
elif str.lower(activateIn_name) == 'mgelu':
act_in = mgelu
else:
act_in = linear
if str.lower(activate_name) == 'relu':
act_func = tf.nn.relu
elif str.lower(activate_name) == 'leaky_relu':
        act_func = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_func stays callable
elif str.lower(activate_name) == 'srelu':
act_func = srelu
elif str.lower(activate_name) == 's2relu':
act_func = s2relu
elif str.lower(activate_name) == 'elu':
act_func = tf.nn.elu
elif str.lower(activate_name) == 'sin':
act_func = mysin
elif str.lower(activate_name) == 'sinaddcos':
act_func = sinAddcos
elif str.lower(activate_name) == 'tanh':
act_func = tf.tanh
elif str.lower(activate_name) == 'gauss':
act_func = gauss
elif str.lower(activate_name) == 'softplus':
act_func = tf.nn.softplus
elif str.lower(activate_name) == 'sigmoid':
act_func = tf.nn.sigmoid
elif str.lower(activate_name) == 'gelu':
act_func = gelu
elif str.lower(activate_name) == 'mgelu':
act_func = mgelu
else:
act_func = linear
if str.lower(activateOut_name) == 'relu':
act_out = tf.nn.relu
elif str.lower(activateOut_name) == 'leaky_relu':
        act_out = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_out stays callable
elif str.lower(activateOut_name) == 'srelu':
act_out = srelu
elif str.lower(activateOut_name) == 's2relu':
act_out = s2relu
elif str.lower(activateOut_name) == 'elu':
act_out = tf.nn.elu
elif str.lower(activateOut_name) == 'sin':
act_out = mysin
elif str.lower(activateOut_name) == 'sinaddcos':
act_out = sinAddcos
elif str.lower(activateOut_name) == 'tanh':
act_out = tf.nn.tanh
elif str.lower(activateOut_name) == 'gauss':
act_out = gauss
elif str.lower(activateOut_name) == 'softplus':
act_out = tf.nn.softplus
elif str.lower(activateOut_name) == 'sigmoid':
act_out = tf.nn.sigmoid
elif str.lower(activateOut_name) == 'gelu':
act_out = gelu
elif str.lower(activateOut_name) == 'mgelu':
act_out = mgelu
else:
act_out = linear
    layers = len(hiddens) + 1  # number of layers from input to output, i.e. the hidden layers plus the output layer
    H = variable_input  # the input data, i.e. the input layer
hidden_record = 0
for k in range(layers-1):
H_pre = H
W = Weights[k]
B = Biases[k]
if k == 0:
H = act_in(tf.add(tf.matmul(H, W), B))
else:
H = act_func(tf.add(tf.matmul(H, W), B))
if hiddens[k] == hidden_record:
H = H+H_pre
hidden_record = hiddens[k]
W_out = Weights[-1]
B_out = Biases[-1]
output = tf.add(tf.matmul(H, W_out), B_out)
output = act_out(output)
return output
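# A minimal end-to-end sketch wiring an initializer and DNN together
# (assumption: TF1-style graph mode, as in the rest of this file; sizes illustrative):
# hiddens = [20, 20, 10]
# Ws, Bs = Generally_Init_NN(2, 1, hiddens, Flag='pde')
# X = tf.compat.v1.placeholder(tf.float32, shape=[None, 2])
# U = DNN(X, Ws, Bs, hiddens, activateIn_name='tanh', activate_name='s2relu')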
def DNN_scale(variable_input, Weights, Biases, hiddens, freq_frag, activateIn_name='tanh', activate_name='tanh',
activateOut_name='linear', repeat_Highfreq=True):
"""
Args:
variable_input: the input data, dim:NxD
Weights: the weight for each hidden layer
Biases: the bias for each hidden layer
hiddens: a list or tuple for hidden-layer, it contains the num of neural units
freq_frag: a list or tuple for scale-factor
activateIn_name: the name of activation function for input-layer
activate_name: the name of activation function for hidden-layer
activateOut_name: the name of activation function for output-layer
repeat_Highfreq: repeat the high-freq factor or not
return:
output data, dim:NxD', generally D'=1
"""
if str.lower(activateIn_name) == 'relu':
act_in = tf.nn.relu
elif str.lower(activateIn_name) == 'leaky_relu':
        act_in = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_in stays callable
elif str.lower(activateIn_name) == 'srelu':
act_in = srelu
elif str.lower(activateIn_name) == 's2relu':
act_in = s2relu
elif str.lower(activateIn_name) == 'elu':
act_in = tf.nn.elu
elif str.lower(activateIn_name) == 'sin':
act_in = tf.sin
elif str.lower(activateIn_name) == 'sinaddcos':
act_in = sinAddcos
elif str.lower(activateIn_name) == 'tanh':
act_in = tf.tanh
elif str.lower(activateIn_name) == 'gauss':
act_in = gauss
elif str.lower(activateIn_name) == 'softplus':
act_in = tf.nn.softplus
elif str.lower(activateIn_name) == 'sigmoid':
act_in = tf.nn.sigmoid
elif str.lower(activateIn_name) == 'gelu':
act_in = gelu
elif str.lower(activateIn_name) == 'mgelu':
act_in = mgelu
else:
act_in = linear
if str.lower(activate_name) == 'relu':
act_func = tf.nn.relu
elif str.lower(activate_name) == 'leaky_relu':
        act_func = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_func stays callable
elif str.lower(activate_name) == 'srelu':
act_func = srelu
elif str.lower(activate_name) == 's2relu':
act_func = s2relu
elif str.lower(activate_name) == 'elu':
act_func = tf.nn.elu
elif str.lower(activate_name) == 'sin':
act_func = mysin
elif str.lower(activate_name) == 'sinaddcos':
act_func = sinAddcos
elif str.lower(activate_name) == 'tanh':
act_func = tf.tanh
elif str.lower(activate_name) == 'gauss':
act_func = gauss
elif str.lower(activate_name) == 'softplus':
act_func = tf.nn.softplus
elif str.lower(activate_name) == 'sigmoid':
act_func = tf.nn.sigmoid
elif str.lower(activate_name) == 'gelu':
act_func = gelu
elif str.lower(activate_name) == 'mgelu':
act_func = mgelu
else:
act_func = linear
if str.lower(activateOut_name) == 'relu':
act_out = tf.nn.relu
elif str.lower(activateOut_name) == 'leaky_relu':
        act_out = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_out stays callable
elif str.lower(activateOut_name) == 'srelu':
act_out = srelu
elif str.lower(activateOut_name) == 's2relu':
act_out = s2relu
elif str.lower(activateOut_name) == 'elu':
act_out = tf.nn.elu
elif str.lower(activateOut_name) == 'sin':
act_out = mysin
elif str.lower(activateOut_name) == 'sinaddcos':
act_out = sinAddcos
elif str.lower(activateOut_name) == 'tanh':
act_out = tf.nn.tanh
elif str.lower(activateOut_name) == 'gauss':
act_out = gauss
elif str.lower(activateOut_name) == 'softplus':
act_out = tf.nn.softplus
elif str.lower(activateOut_name) == 'sigmoid':
act_out = tf.nn.sigmoid
elif str.lower(activateOut_name) == 'gelu':
act_out = gelu
elif str.lower(activateOut_name) == 'mgelu':
act_out = mgelu
else:
act_out = linear
Unit_num = int(hiddens[0] / len(freq_frag))
    # np.repeat(a, repeats, axis=None)
    # Input: a is an array; repeats is how many times each element is repeated
    #        (usually a scalar, or a list in more complex cases), along direction axis.
    # Returns: if axis is not given, the repeated result is flattened to 1-D; otherwise it keeps its shape.
    mixcoe = np.repeat(freq_frag, Unit_num)
    # Pad mixcoe up to hiddens[0] entries (with the highest frequency at the end, or the lowest at the front when repeat_Highfreq is False):
if repeat_Highfreq==True:
mixcoe = np.concatenate((mixcoe, np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[-1]))
else:
mixcoe = np.concatenate((np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[0], mixcoe))
mixcoe = mixcoe.astype(np.float32)
    layers = len(hiddens) + 1  # number of layers from input to output, i.e. the hidden layers plus the output layer
    H = variable_input  # the input data, i.e. the input layer
W_in = Weights[0]
B_in = Biases[0]
if len(freq_frag) == 1:
H = tf.add(tf.matmul(H, W_in), B_in)
else:
H = tf.add(tf.matmul(H, W_in)*mixcoe, B_in)
H = act_in(H)
hidden_record = hiddens[0]
for k in range(layers-2):
H_pre = H
W = Weights[k+1]
B = Biases[k+1]
H = act_func(tf.add(tf.matmul(H, W), B))
if hiddens[k+1] == hidden_record:
H = H + H_pre
hidden_record = hiddens[k+1]
W_out = Weights[-1]
B_out = Biases[-1]
output = tf.add(tf.matmul(H, W_out), B_out)
    # the following is the output layer
output = act_out(output)
return output
def DNN_adapt_scale(variable_input, Weights, Biases, hiddens, freq_frag, activateIn_name='tanh', activate_name='tanh',
activateOut_name='linear', repeat_Highfreq=True):
"""
Args:
variable_input: the input data, dim:NxD
Weights: the weight for each hidden layer
Biases: the bias for each hidden layer
hiddens: a list or tuple for hidden-layer, it contains the num of neural units
freq_frag: a list or tuple for scale-factor
activateIn_name: the name of activation function for input-layer
activate_name: the name of activation function for hidden-layer
activateOut_name: the name of activation function for output-layer
repeat_Highfreq: repeat the high-freq factor or not
return:
output data, dim:NxD', generally D'=1
"""
if str.lower(activateIn_name) == 'relu':
act_in = tf.nn.relu
elif str.lower(activateIn_name) == 'leaky_relu':
        act_in = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_in stays callable
elif str.lower(activateIn_name) == 'srelu':
act_in = srelu
elif str.lower(activateIn_name) == 's2relu':
act_in = s2relu
elif str.lower(activateIn_name) == 'elu':
act_in = tf.nn.elu
elif str.lower(activateIn_name) == 'sin':
act_in = tf.sin
elif str.lower(activateIn_name) == 'sinaddcos':
act_in = sinAddcos
elif str.lower(activateIn_name) == 'tanh':
act_in = tf.tanh
elif str.lower(activateIn_name) == 'gauss':
act_in = gauss
elif str.lower(activateIn_name) == 'softplus':
act_in = tf.nn.softplus
elif str.lower(activateIn_name) == 'sigmoid':
act_in = tf.nn.sigmoid
elif str.lower(activateIn_name) == 'gelu':
act_in = gelu
elif str.lower(activateIn_name) == 'mgelu':
act_in = mgelu
else:
act_in = linear
if str.lower(activate_name) == 'relu':
act_func = tf.nn.relu
elif str.lower(activate_name) == 'leaky_relu':
        act_func = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_func stays callable
elif str.lower(activate_name) == 'srelu':
act_func = srelu
elif str.lower(activate_name) == 's2relu':
act_func = s2relu
elif str.lower(activate_name) == 'elu':
act_func = tf.nn.elu
elif str.lower(activate_name) == 'sin':
act_func = mysin
elif str.lower(activate_name) == 'tanh':
act_func = tf.tanh
elif str.lower(activate_name) == 'gauss':
act_func = gauss
elif str.lower(activate_name) == 'softplus':
act_func = tf.nn.softplus
elif str.lower(activate_name) == 'sigmoid':
act_func = tf.nn.sigmoid
elif str.lower(activate_name) == 'gelu':
act_func = gelu
elif str.lower(activate_name) == 'mgelu':
act_func = mgelu
else:
act_func = linear
if str.lower(activateOut_name) == 'relu':
act_out = tf.nn.relu
elif str.lower(activateOut_name) == 'leaky_relu':
        act_out = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_out stays callable
elif str.lower(activateOut_name) == 'srelu':
act_out = srelu
elif str.lower(activateOut_name) == 's2relu':
act_out = s2relu
elif str.lower(activateOut_name) == 'elu':
act_out = tf.nn.elu
elif str.lower(activateOut_name) == 'sin':
act_out = mysin
elif str.lower(activateOut_name) == 'sinaddcos':
act_out = sinAddcos
elif str.lower(activateOut_name) == 'tanh':
act_out = tf.nn.tanh
elif str.lower(activateOut_name) == 'gauss':
act_out = gauss
elif str.lower(activateOut_name) == 'softplus':
act_out = tf.nn.softplus
elif str.lower(activateOut_name) == 'sigmoid':
act_out = tf.nn.sigmoid
elif str.lower(activateOut_name) == 'gelu':
act_out = gelu
elif str.lower(activateOut_name) == 'mgelu':
act_out = mgelu
else:
act_out = linear
Unit_num = int(hiddens[0] / len(freq_frag))
    # np.repeat(a, repeats, axis=None)
    # Input: a is an array; repeats is how many times each element is repeated
    #        (usually a scalar, or a list in more complex cases), along direction axis.
    # Returns: if axis is not given, the repeated result is flattened to 1-D; otherwise it keeps its shape.
    init_mixcoe = np.repeat(freq_frag, Unit_num)
    # Pad init_mixcoe up to hiddens[0] entries (appending the highest frequency, or the lowest when repeat_Highfreq is False):
if repeat_Highfreq==True:
init_mixcoe = np.concatenate((init_mixcoe, np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[-1]))
else:
init_mixcoe = np.concatenate((init_mixcoe, np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[0]))
    # convert the int-typed mixcoe to np.float32; mixcoe[:units[1]] omits the row dimension
init_mixcoe = init_mixcoe.astype(np.float32)
    layers = len(hiddens) + 1  # number of layers from input to output, i.e. the hidden layers plus the output layer
    H = variable_input  # the input data, i.e. the input layer
W_in = Weights[0]
B_in = Biases[0]
    mixcoe = tf.compat.v1.get_variable(name='M0', initializer=init_mixcoe)
# mixcoe = tf.exp(mixcoe)
if len(freq_frag) == 1:
H = tf.add(tf.matmul(H, W_in), B_in)
else:
H = tf.add(tf.matmul(H, W_in)*mixcoe, B_in)
H = act_in(H)
hidden_record = hiddens[0]
for k in range(layers-2):
H_pre = H
W = Weights[k+1]
B = Biases[k+1]
H = act_func(tf.add(tf.matmul(H, W), B))
if hiddens[k+1] == hidden_record:
H = H + H_pre
hidden_record = hiddens[k+1]
W_out = Weights[-1]
B_out = Biases[-1]
output = tf.add(tf.matmul(H, W_out), B_out)
    # the following is the output layer
output = act_out(output)
return output
# FourierBase: for each row (i.e. each sampling point), cos is concatenated with sin
def DNN_FourierBase(variable_input, Weights, Biases, hiddens, freq_frag, activate_name='tanh', activateOut_name='linear',
repeat_Highfreq=True, sFourier=0.5):
"""
Args:
variable_input: the input data, dim:NxD
Weights: the weight for each hidden layer
Biases: the bias for each hidden layer
hiddens: a list or tuple for hidden-layer, it contains the num of neural units
freq_frag: a list or tuple for scale-factor
activate_name: the name of activation function for hidden-layer
activateOut_name: the name of activation function for output-layer
repeat_Highfreq: repeat the high-freq factor or not
sFourier:a scale factor for adjust the range of input-layer
return:
output data, dim:NxD', generally D'=1
"""
if str.lower(activate_name) == 'relu':
act_func = tf.nn.relu
elif str.lower(activate_name) == 'leaky_relu':
        act_func = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_func stays callable
elif str.lower(activate_name) == 'srelu':
act_func = srelu
elif str.lower(activate_name) == 's2relu':
act_func = s2relu
elif str.lower(activate_name) == 'elu':
act_func = tf.nn.elu
elif str.lower(activate_name) == 'sin':
act_func = mysin
elif str.lower(activate_name) == 'sinaddcos':
act_func = sinAddcos
elif str.lower(activate_name) == 'tanh':
act_func = tf.tanh
elif str.lower(activate_name) == 'gauss':
act_func = gauss
elif str.lower(activate_name) == 'softplus':
act_func = tf.nn.softplus
elif str.lower(activate_name) == 'sigmoid':
act_func = tf.nn.sigmoid
elif str.lower(activate_name) == 'gelu':
act_func = gelu
elif str.lower(activate_name) == 'mgelu':
act_func = mgelu
else:
act_func = linear
if str.lower(activateOut_name) == 'relu':
act_out = tf.nn.relu
elif str.lower(activateOut_name) == 'leaky_relu':
        act_out = lambda z: tf.nn.leaky_relu(z, alpha=0.2)  # wrapped in a lambda so act_out stays callable
elif str.lower(activateOut_name) == 'srelu':
act_out = srelu
elif str.lower(activateOut_name) == 's2relu':
act_out = s2relu
elif str.lower(activateOut_name) == 'elu':
act_out = tf.nn.elu
elif str.lower(activateOut_name) == 'sin':
act_out = mysin
elif str.lower(activateOut_name) == 'sinaddcos':
act_out = sinAddcos
elif str.lower(activateOut_name) == 'tanh':
act_out = tf.nn.tanh
elif str.lower(activateOut_name) == 'gauss':
act_out = gauss
elif str.lower(activateOut_name) == 'softplus':
act_out = tf.nn.softplus
elif str.lower(activateOut_name) == 'sigmoid':
act_out = tf.nn.sigmoid
elif str.lower(activateOut_name) == 'gelu':
act_out = gelu
elif str.lower(activateOut_name) == 'mgelu':
act_out = mgelu
else:
act_out = linear
    layers = len(hiddens) + 1  # number of layers from input to output, i.e. the hidden layers plus the output layer
    H = variable_input  # the input data, i.e. the input layer
    # ratio between the width of the first hidden layer and the number of scale factors
    Unit_num = int(hiddens[0] / len(freq_frag))
    # the frequency factors are then replicated according to this ratio
    # np.repeat(a, repeats, axis=None)
    # Input: a is an array; repeats is how many times each element is repeated
    #        (usually a scalar, or a list in more complex cases), along direction axis.
    # Returns: if axis is not given, the repeated result is flattened to 1-D; otherwise it keeps its shape.
mixcoe = np.repeat(freq_frag, Unit_num)
if repeat_Highfreq == True:
        # If the first hidden layer is wider than the replicated frequency factors, pad the end with the highest frequency
mixcoe = np.concatenate((mixcoe, np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[-1]))
else:
mixcoe = np.concatenate((mixcoe, np.ones([hiddens[0] - Unit_num * len(freq_frag)]) * freq_frag[0]))
mixcoe = mixcoe.astype(np.float32)
W_in = Weights[0]
B_in = Biases[0]
if len(freq_frag) == 1:
H = tf.add(tf.matmul(H, W_in), B_in)
else:
# H = tf.add(tf.matmul(H, W_in)*mixcoe, B_in)
H = tf.matmul(H, W_in) * mixcoe
H = sFourier * (tf.concat([tf.cos(H), tf.sin(H)], axis=-1))
# H = sfactor * (tf.concat([tf.cos(np.pi * H), tf.sin(np.pi * H)], axis=-1))
# H = sfactor * tf.concat([tf.cos(2 * np.pi * H), tf.sin(2 * np.pi * H)], axis=-1)
hiddens_record = hiddens[0]
for k in range(layers-2):
H_pre = H
W = Weights[k+1]
B = Biases[k+1]
H = act_func(tf.add(tf.matmul(H, W), B))
if (hiddens[k+1] == hiddens_record) and (k != 0):
H = H + H_pre
hiddens_record = hiddens[k+1]
W_out = Weights[-1]
B_out = Biases[-1]
output = tf.add(tf.matmul(H, W_out), B_out)
output = act_out(output)
return output
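# A minimal usage sketch for the Fourier-feature network (assumption: TF1 graph mode;
# sizes illustrative). The cos/sin concatenation doubles the first hidden width, so the
# matching initializer is Xavier_init_NN_Fourier, whose second weight matrix has shape
# (hiddens[0]*2, hiddens[1]):
# hiddens = [30, 40, 40]
# Ws, Bs = Xavier_init_NN_Fourier(2, 1, hiddens, Flag='F')
# X = tf.compat.v1.placeholder(tf.float32, shape=[None, 2])
# U = DNN_FourierBase(X, Ws, Bs, hiddens, freq_frag=[1, 2, 4, 8, 16], activate_name='s2relu', sFourier=0.5)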
| 40.863074
| 124
| 0.589035
| 6,434
| 46,257
| 4.052533
| 0.061859
| 0.044182
| 0.06029
| 0.045869
| 0.882297
| 0.866534
| 0.842333
| 0.829332
| 0.817596
| 0.799264
| 0
| 0.02413
| 0.273429
| 46,257
| 1,131
| 125
| 40.899204
| 0.751674
| 0.197159
| 0
| 0.77036
| 0
| 0
| 0.036436
| 0
| 0
| 0
| 0
| 0
| 0.004005
| 1
| 0.061415
| false
| 0
| 0.00267
| 0.028037
| 0.125501
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d5e3d16cabd436975de83388f5b74de23fa2ede
| 6,840
|
py
|
Python
|
tests/processor/test_process_record.py
|
cinchio/transform-singer
|
5cc77a6af2e2c34cbfdb308ad033ac431370edea
|
[
"Apache-2.0"
] | null | null | null |
tests/processor/test_process_record.py
|
cinchio/transform-singer
|
5cc77a6af2e2c34cbfdb308ad033ac431370edea
|
[
"Apache-2.0"
] | null | null | null |
tests/processor/test_process_record.py
|
cinchio/transform-singer
|
5cc77a6af2e2c34cbfdb308ad033ac431370edea
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from unittest.mock import MagicMock, patch
from transform_singer.processor import Processor
class TestProcessMapping(unittest.TestCase):
@patch("transform_singer.processor.singer.write_record")
def test_simple_stream(self, write_record):
args = MagicMock()
args.config = {
"mappings": {
"facilities": [
{
"stream": "location",
"properties": {
"name": {"type": "record", "key": "name"},
},
}
],
}
}
processor = Processor(args)
processor.process_record("facilities", {"name": "Foo"})
write_record.assert_called_once_with("location", {"name": "Foo"})
@patch("transform_singer.processor.singer.write_record")
def test_nested_child_stream(self, write_record):
args = MagicMock()
args.config = {
"mappings": {
"facilities.children": [
{
"stream": "location",
"properties": {
"name": {"type": "record", "key": "name"},
},
}
],
}
}
processor = Processor(args)
processor.process_record(
"facilities",
{
"children": [
{"name": "Joe"},
{"name": "Bob"},
{"name": "Ann"},
]
},
)
self.assertEqual(len(write_record.call_args_list), 3)
write_record.assert_any_call("location", {"name": "Joe"})
write_record.assert_any_call("location", {"name": "Bob"})
write_record.assert_any_call("location", {"name": "Ann"})
@patch("transform_singer.processor.singer.write_record")
def test_nested_grandchild_stream(self, write_record):
args = MagicMock()
args.config = {
"mappings": {
"facilities.children.grandchildren": [
{
"stream": "location",
"properties": {
"name": {"type": "record", "key": "name"},
"parent": {"type": "record", "key": "@parent.name"},
"grandparent": {
"type": "record",
"key": "@parent.@parent.name",
},
"rootparent": {"type": "record", "key": "@root.name"},
},
}
],
}
}
processor = Processor(args)
processor.process_record(
"facilities",
{
"name": "Jared",
"children": {
"name": "Mary",
"grandchildren": [
{"name": "Joe"},
{"name": "Bob"},
{"name": "Ann"},
],
},
},
)
self.assertEqual(len(write_record.call_args_list), 3)
write_record.assert_any_call(
"location",
{
"name": "Joe",
"parent": "Mary",
"grandparent": "Jared",
"rootparent": "Jared",
},
)
write_record.assert_any_call(
"location",
{
"name": "Bob",
"parent": "Mary",
"grandparent": "Jared",
"rootparent": "Jared",
},
)
write_record.assert_any_call(
"location",
{
"name": "Ann",
"parent": "Mary",
"grandparent": "Jared",
"rootparent": "Jared",
},
)
@patch("transform_singer.processor.singer.write_record")
def test_extreme_nested_stream(self, write_record):
args = MagicMock()
args.config = {
"mappings": {
"facilities.children.children": [
{
"stream": "location",
"properties": {
"name": {"type": "record", "key": "name"},
"parent": {"type": "record", "key": "@parent.name"},
"grandparent": {
"type": "record",
"key": "@parent.@parent.name",
},
"rootparent": {"type": "record", "key": "@root.name"},
},
}
],
}
}
processor = Processor(args)
processor.process_record(
"facilities",
{
"name": "Grandperson",
"spouse": "Grandspouse",
"children": [
{
"name": "Parent1",
"spouse": "Spouse1",
"children": [
{"name": "Child1A"},
{"name": "Child1B"},
],
},
{
"name": "Parent2",
"spouse": "Spouse2",
"children": [
{"name": "Child2A"},
{"name": "Child2B"},
],
},
],
},
)
self.assertEqual(len(write_record.call_args_list), 4)
write_record.assert_any_call(
"location",
{
"name": "Child1A",
"parent": "Parent1",
"grandparent": "Grandperson",
"rootparent": "Grandperson",
},
)
write_record.assert_any_call(
"location",
{
"name": "Child1B",
"parent": "Parent1",
"grandparent": "Grandperson",
"rootparent": "Grandperson",
},
)
write_record.assert_any_call(
"location",
{
"name": "Child2A",
"parent": "Parent2",
"grandparent": "Grandperson",
"rootparent": "Grandperson",
},
)
write_record.assert_any_call(
"location",
{
"name": "Child2B",
"parent": "Parent2",
"grandparent": "Grandperson",
"rootparent": "Grandperson",
},
)
| 31.962617
| 82
| 0.368129
| 406
| 6,840
| 6.027094
| 0.165025
| 0.098897
| 0.07642
| 0.081733
| 0.818962
| 0.818962
| 0.774009
| 0.759297
| 0.708214
| 0.659992
| 0
| 0.005557
| 0.500146
| 6,840
| 213
| 83
| 32.112676
| 0.710149
| 0
| 0
| 0.515
| 0
| 0
| 0.221199
| 0.035819
| 0
| 0
| 0
| 0
| 0.07
| 1
| 0.02
| false
| 0
| 0.015
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d72556eed45d622ac5a64e9b1c545e295201ad0
| 1,366
|
py
|
Python
|
MolecularRepresentation/aodbw.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
MolecularRepresentation/aodbw.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
MolecularRepresentation/aodbw.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
# Description: Ambient occlusion in grayscale with carbon atoms colored black. Note: requires the gscale() function from pymolshortcuts.py. Download this script from http://GitHub.com/MooersLab/pymolshortcuts. Load the functions from this script with the command "cmd.do("run pymolshortcuts.py").
# Source: placeHolder
"""
cmd.do('set_color oxygen, [1.0,0.4,0.4];')
cmd.do('set_color nitrogen, [0.5,0.5,1.0];')
cmd.do('remove solvent;')
cmd.do('as spheres;')
cmd.do('util.cbaw;')
cmd.do('bg white;')
cmd.do('gscale();')
cmd.do('set light_count,10;')
cmd.do('set spec_count,1;')
cmd.do('set shininess, 10;')
cmd.do('set specular,0.25;')
cmd.do('set ambient,0;')
cmd.do('set direct,0;')
cmd.do('set reflect,1.5;')
cmd.do('set ray_shadow_decay_factor, 0.1;')
cmd.do('set ray_shadow_decay_range, 2;')
cmd.do('set depth_cue, 0;')
cmd.do('ray;')
"""
cmd.do('set_color oxygen, [1.0,0.4,0.4];')
cmd.do('set_color nitrogen, [0.5,0.5,1.0];')
cmd.do('remove solvent;')
cmd.do('as spheres;')
cmd.do('util.cbaw;')
cmd.do('bg white;')
cmd.do('gscale();')
cmd.do('set light_count,10;')
cmd.do('set spec_count,1;')
cmd.do('set shininess, 10;')
cmd.do('set specular,0.25;')
cmd.do('set ambient,0;')
cmd.do('set direct,0;')
cmd.do('set reflect,1.5;')
cmd.do('set ray_shadow_decay_factor, 0.1;')
cmd.do('set ray_shadow_decay_range, 2;')
cmd.do('set depth_cue, 0;')
cmd.do('ray;')
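# Editor's note: a hedged usage sketch for the snippet above, assuming PyMOL is
# running with pymolshortcuts.py in the working directory (the gscale() shortcut
# the description mentions is defined there; the PDB code is just an example):
cmd.do('fetch 1lw9;')                # load any structure of interest
cmd.do('run pymolshortcuts.py;')     # defines gscale() and friends
# ... execute the commands above, then save the ray-traced frame:
cmd.do('png ambient_occlusion.png;')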
| 31.767442 | 298 | 0.683748 | 255 | 1,366 | 3.576471 | 0.258824 | 0.202851 | 0.210526 | 0.057018 | 0.717105 | 0.717105 | 0.717105 | 0.717105 | 0.717105 | 0.717105 | 0 | 0.04498 | 0.08858 | 1,366 | 42 | 299 | 32.52381 | 0.68755 | 0.611274 | 0 | 0 | 0 | 0 | 0.611111 | 0.090038 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
5d7530412cb253e8a2b46cbfc89319d0329bbeb1 | 70,959 | py | Python | New folder/New folder/pyray-master/videos/tetartoid.py | JVuns/Driver-Log-Recorder---Final-Project | efbb2f67aee2db98fc2310617e589e845fc6c27d | ["MIT"] | 715 | 2018-01-13T04:29:10.000Z | 2022-03-24T12:15:08.000Z | videos/tetartoid.py | Warlockk/pyray | cce6a6289ef9f2b0f92000847a04178ec7287520 | ["MIT"] | 8 | 2018-01-14T07:48:41.000Z | 2020-07-14T09:56:27.000Z | videos/tetartoid.py | Warlockk/pyray | cce6a6289ef9f2b0f92000847a04178ec7287520 | ["MIT"] | 111 | 2018-01-13T06:47:24.000Z | 2021-04-01T05:58:28.000Z |
import numpy as np
from PIL import Image, ImageDraw, ImageFont, ImageMath
from pyray.shapes.solid.polyhedron import *
from pyray.axes import *
from pyray.rotation import *
from pyray.misc import zigzag2
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
basedir = '..\\images\\RotatingCube\\'
'''
Scene 1 - The following figure is a Tetartoid.
'''
txt = "The following figure is a Tetartoid."
tt = Tetartoid(0.45,0.08)
for i in range(0, 39):
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 10 -f image2 -i im%d.png -vb 20M vid.avi
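# Editor's note: the trailing ffmpeg comments in this file stitch the numbered
# frames into a video. A hedged sketch of running the same command from Python,
# assuming ffmpeg is on PATH and the working directory holds im0.png, im1.png, ...:
import subprocess
subprocess.run(
    ["ffmpeg", "-framerate", "10", "-f", "image2",
     "-i", "im%d.png", "-vb", "20M", "vid.avi"],
    check=True)   # raises CalledProcessError if ffmpeg exits nonzero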
'''
Scene 2 - It has these irregular pentagons as its
faces, which is an element of asymmetry.
'''
txt = "Its faces are irregular pentagons,\n an element of asymmetry."
tt = Tetartoid(0.45,0.08)
for i in range(39, 39+46):
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
pl = tt.planes[int((i-39)/5)%12]
render_solid_planes([pl], draw, r, shift=np.array([1000, 1000, 0]),
scale=1200, h=170,s=100)
writeStaggeredText(txt, draw, i-39, speed=2)
im.save(basedir + "im" + str(i-39) + ".png")
#ffmpeg -framerate 9 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 3 - But then all its faces are the same
pentagon, which is rather symmetric.
'''
txt = "But then each face is the\nsame pentagon, making\nit rather symmetric."
tt = Tetartoid(0.45,0.08)
for i in range(84, 84+46):
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
for j in range(min(int((i-84)/2),12)):
pl = tt.planes[:j]
render_solid_planes(pl, draw, r, shift=np.array([1000, 1000, 0]),
scale=1200, h=170,s=100)
writeStaggeredText(txt, draw, i-84, speed=2)
im.save(basedir + "im" + str(i-84) + ".png")
#ffmpeg -framerate 7 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 4
'''
txt = "It's these element\nof symmetry and asymmetry\ncoexisting that makes\nit fascinating."
tt = Tetartoid(0.45,0.08)
for i in range(129, 129+56):
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
#tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
for j in range(12):
pl = tt.planes[:j]
render_solid_planes(pl, draw, r, shift=np.array([1000, 1000, 0]),
scale=1200, h=170,s=100)
for j in range(min(int((i-129)/2),12)):
pl = tt.planes[:j]
render_solid_planes(pl, draw, r, shift=np.array([1000, 1000, 0]),
scale=1200)
writeStaggeredText(txt, draw, i-129, speed=2)
im.save(basedir + "im" + str(i-129) + ".png")
#ffmpeg -framerate 6 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 5
'''
txt = "This is not one solid\nbut a whole\nfamily of them."
for i in range(184, 184+56):
s = zigzag2((i-184)*.033, .45, .49, .13)
t = 0.08
if i-184>18:
t = zigzag2((i-184-18)*.005, .08, .1, .01)
tt = Tetartoid(s, t)
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
writeStaggeredText(txt, draw, i-184, speed=2)
im.save(basedir + "im" + str(i-184) + ".png")
#ffmpeg -framerate 6 -f image2 -i im%d.png -vb 20M vid.avi
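# Editor's note: zigzag2 (imported from pyray.misc) oscillates s and t so the solid
# morphs back and forth. Its exact semantics are not shown in this file; a
# hypothetical triangle-wave stand-in with a similar call shape might look like:
def triangle_wave(x, start, hi, lo):
    """Oscillate between lo and hi, beginning at start (an assumed behavior)."""
    span = hi - lo
    if span == 0:
        return lo
    phase = (x + (start - lo) / span) % 2.0      # position within one up-down cycle
    return lo + span * (phase if phase <= 1.0 else 2.0 - phase)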
s_old = 0.1549999999999998
t_old = 0.08499999999999998
'''
Scene 6
'''
txt = "One member of the family\nhas all regular pentagons."
for i in range(239, 239+56):
s = s_old + (0.404508-s_old)*(i-239)/56
t = t_old + (0.0954913-t_old)*(i-239)/56
tt = Tetartoid(s, t)
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
writeStaggeredText(txt, draw, i-239, speed=2)
im.save(basedir + "im" + str(i-239) + ".png")
#ffmpeg -framerate 9 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 7
'''
txt = "It's called a Dodecahedron"
for i in range(294, 294+31):
s = 0.404508
t = 0.0954913
tt = Tetartoid(s, t)
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
writeStaggeredText(txt, draw, i-294, speed=2)
im.save(basedir + "im" + str(i-294) + ".png")
#ffmpeg -framerate 7 -f image2 -i im%d.png -vb 20M vid.avi
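# Editor's note: the "special values" s = 0.404508, t = 0.0954913 used for the
# dodecahedron appear to be golden-ratio expressions, s ~ (1+sqrt(5))/8 and
# t ~ (3-sqrt(5))/8. A quick numerical check (an editorial observation, not a
# claim made in the source):
print((1 + np.sqrt(5)) / 8)   # 0.4045084971874737
print((3 - np.sqrt(5)) / 8)   # 0.09549150281252627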
'''
Scene 8
'''
base_i = 324
txt = "To construct it,\nwe start with one of the pentagons."
for i in range(base_i, base_i+51):
s = 0.404508
t = 0.0954913
tt = Tetartoid(s, t)
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/90)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
#tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=1200)
render_solid_planes([tt.planes[0]], draw, r, shift=np.array([1000, 1000, 0]),
scale=1200)
for j in range(1,12):
pl = np.copy(tt.planes[j])
pl_center = np.sum(pl,axis=0)
pl = pl + pl_center*(i-base_i)/40
render_solid_planes([pl], draw, r, shift=np.array([1000, 1000, 0]),
scale=1200)
writeStaggeredText(txt, draw, i-base_i, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 7 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 9
'''
base_i=374
txt = "Duplicate by rotating\nabout the vertices."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*base_i/90)
plane = np.copy(tt.planes[0])
#plane = np.array([[0,1,0],[np.cos(np.pi/3),-np.sin(np.pi/3),0],[-np.cos(np.pi/3), -np.sin(np.pi/3),0]])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
#plane_e = np.append(plane,np.ones(3)[...,None],1)
for i in range(base_i, base_i+31):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
#r = axis_rotation(plane_e[1],plane_e[0],2*np.pi*(i-base_i)/30)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*min((i-base_i),21)/30)
plane1 = np.dot(r,plane_e.T).T
plane1 = plane1[:,np.array([0,1,2])]
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane1],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, i-base_i, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 6 -f image2 -i im%d.png -vb 20M vid.avi
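# Editor's note: the np.ones(5)[..., None] column appended above puts the pentagon's
# five vertices into homogeneous coordinates, so axis_rotation can return one matrix
# that rotates about an axis not through the origin (translate, rotate, translate
# back). A minimal numpy sketch of that composition; build_axis_rotation is a
# hypothetical stand-in, not pyray's implementation:
def build_axis_rotation(p, q, theta):
    """4x4 homogeneous rotation by theta about the line through points p and q."""
    d = (q - p) / np.linalg.norm(q - p)
    K = np.array([[0, -d[2], d[1]], [d[2], 0, -d[0]], [-d[1], d[0], 0]])
    R3 = np.eye(3) + np.sin(theta) * K + (1 - np.cos(theta)) * (K @ K)  # Rodrigues
    M = np.eye(4)
    M[:3, :3] = R3
    M[:3, 3] = p - R3 @ p   # conjugate by translation so points on the axis stay put
    return M

# pts_h = np.append(pts, np.ones(len(pts))[..., None], 1)          # N x 4, as above
# rotated = (build_axis_rotation(a, b, np.pi / 6) @ pts_h.T).T[:, :3]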
'''
Scene 10
'''
base_i = 404
txt = "Duplicate by rotating\nabout the vertices."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*21/30)
plane1 = np.dot(r,plane_e.T).T
plane1 = plane1[:,np.array([0,1,2])]
for i in range(base_i, base_i+31):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r = axis_rotation(plane[1],plane[1]+plane_per,2*np.pi*min((i-base_i),21)/30)
plane2 = np.dot(r,plane_e.T).T
plane2 = plane2[:,np.array([0,1,2])]
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200,make_edges=True)
render_solid_planes([plane1],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane2],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, 32, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 8 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 11
'''
base_i = 434
txt = "Duplicate by rotating\nabout the vertices."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*21/30)
plane1 = np.dot(r,plane_e.T).T
plane1 = plane1[:,np.array([0,1,2])]
r = axis_rotation(plane[1],plane[1]+plane_per,2*np.pi*21/30)
plane2 = np.dot(r,plane_e.T).T
plane2 = plane2[:,np.array([0,1,2])]
for i in range(base_i, base_i+22):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r = axis_rotation(plane[2],plane[2]+plane_per,2*np.pi*min((i-base_i),21)/30)
plane3 = np.dot(r,plane_e.T).T
plane3 = plane3[:,np.array([0,1,2])]
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200,make_edges=True)
render_solid_planes([plane1],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane2],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane3],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, 32, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 12 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 12
'''
base_i = 455
txt = "Duplicate by rotating\nabout the vertices."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*21/30)
plane1 = np.dot(r,plane_e.T).T
plane1 = plane1[:,np.array([0,1,2])]
r = axis_rotation(plane[1],plane[1]+plane_per,2*np.pi*21/30)
plane2 = np.dot(r,plane_e.T).T
plane2 = plane2[:,np.array([0,1,2])]
r = axis_rotation(plane[2],plane[2]+plane_per,2*np.pi*21/30)
plane3 = np.dot(r,plane_e.T).T
plane3 = plane3[:,np.array([0,1,2])]
for i in range(base_i, base_i+22):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r = axis_rotation(plane[3],plane[3]+plane_per,2*np.pi*min((i-base_i),21)/30)
plane4 = np.dot(r,plane_e.T).T
plane4 = plane4[:,np.array([0,1,2])]
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200,make_edges=True)
render_solid_planes([plane1],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane2],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane3],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane4],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, 32, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 20 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 13
'''
base_i = 476
txt = "Duplicate by rotating\nabout the vertices."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*21/30)
plane1 = np.dot(r,plane_e.T).T
plane1 = plane1[:,np.array([0,1,2])]
r = axis_rotation(plane[1],plane[1]+plane_per,2*np.pi*21/30)
plane2 = np.dot(r,plane_e.T).T
plane2 = plane2[:,np.array([0,1,2])]
r = axis_rotation(plane[2],plane[2]+plane_per,2*np.pi*21/30)
plane3 = np.dot(r,plane_e.T).T
plane3 = plane3[:,np.array([0,1,2])]
r = axis_rotation(plane[3],plane[3]+plane_per,2*np.pi*21/30)
plane4 = np.dot(r,plane_e.T).T
plane4 = plane4[:,np.array([0,1,2])]
for i in range(base_i, base_i+22):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r = axis_rotation(plane[4],plane[4]+plane_per,2*np.pi*min((i-base_i),21)/30)
plane5 = np.dot(r,plane_e.T).T
plane5 = plane5[:,np.array([0,1,2])]
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200,make_edges=True)
render_solid_planes([plane1],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane2],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane3],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane4],draw, np.eye(3),cut_back_face=False,
scale=1200)
render_solid_planes([plane5],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, 32, speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
#ffmpeg -framerate 25 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 14
'''
base_i = 497
txt = "Then rotate them in\nto form a bowl."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_e = np.append(plane,np.ones(5)[...,None],1)
r = axis_rotation(plane[0],plane[0]+plane_per,2*np.pi*21/30)
plane1 = np.dot(r,plane_e.T).T
#plane1 = plane1[:,np.array([0,1,2])]
r = axis_rotation(plane[1],plane[1]+plane_per,2*np.pi*21/30)
plane2 = np.dot(r,plane_e.T).T
#plane2 = plane2[:,np.array([0,1,2])]
r = axis_rotation(plane[2],plane[2]+plane_per,2*np.pi*21/30)
plane3 = np.dot(r,plane_e.T).T
#plane3 = plane3[:,np.array([0,1,2])]
r = axis_rotation(plane[3],plane[3]+plane_per,2*np.pi*21/30)
plane4 = np.dot(r,plane_e.T).T
#plane4 = plane4[:,np.array([0,1,2])]
r = axis_rotation(plane[4],plane[4]+plane_per,2*np.pi*21/30)
plane5 = np.dot(r,plane_e.T).T
#plane5 = plane5[:,np.array([0,1,2])]
for i in range(base_i, base_i+31):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
render_solid_planes([plane],draw, np.eye(3),cut_back_face=False,
scale=1200,make_edges=True)
r = axis_rotation(plane1[0],plane1[4],108/180*np.pi*min((i-base_i),18)/30)
plane1_dr = np.dot(r,plane1.T).T
plane1_dr = plane1_dr[:,np.array([0,1,2])]
render_solid_planes([plane1_dr],draw, np.eye(3),cut_back_face=False,
scale=1200)
r = axis_rotation(plane2[1],plane2[0],108/180*np.pi*min((i-base_i),18)/30)
plane2_dr = np.dot(r,plane2.T).T
plane2_dr = plane2_dr[:,np.array([0,1,2])]
render_solid_planes([plane2_dr],draw, np.eye(3),cut_back_face=False,
scale=1200)
r = axis_rotation(plane3[2],plane3[1],108/180*np.pi*min((i-base_i),18)/30)
plane3_dr = np.dot(r,plane3.T).T
plane3_dr = plane3_dr[:,np.array([0,1,2])]
render_solid_planes([plane3_dr],draw, np.eye(3),cut_back_face=False,
scale=1200)
r = axis_rotation(plane4[3],plane4[2],108/180*np.pi*min((i-base_i),18)/30)
plane4_dr = np.dot(r,plane4.T).T
plane4_dr = plane4_dr[:,np.array([0,1,2])]
render_solid_planes([plane4_dr],draw, np.eye(3),cut_back_face=False,
scale=1200)
r = axis_rotation(plane5[4],plane5[3],108/180*np.pi*min((i-base_i),18)/30)
plane5_dr = np.dot(r,plane5.T).T
plane5_dr = plane5_dr[:,np.array([0,1,2])]
render_solid_planes([plane5_dr],draw, np.eye(3),cut_back_face=False,
scale=1200)
writeStaggeredText(txt, draw, 2*(i-base_i), speed=2)
im.save(basedir + "im" + str(i-base_i) + ".png")
'''
Scene 15
'''
txt = "Duplicate the bowl."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
#plane = np.dot(plane,r_base)
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
for i in range(21):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
nu_planes = np.copy(tt.planes[[0,1,2,3,5,6]])
nu_planes -= plane_per*min(i,10)*3
render_solid_planes(nu_planes, draw, r_base, cut_back_face=False,
scale=1200-i*24)
render_solid_planes(tt.planes[[0,1,2,3,5,6]], draw, r_base, cut_back_face=False,
scale=1200-i*24)
writeStaggeredText(txt, draw, 2*i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 5.5 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 16
'''
## First, we need to find the angle between adjacent faces of a Dodecahedron.
angle = angle_btw_planes(tt.planes[0], tt.planes[1])
txt = "Turn the second one\ninside out."
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*374/90)
plane = np.copy(tt.planes[0])
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
for i in range(21):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
nu_planes = np.copy(tt.planes[[0,1,2,3,5,6]])
nu_planes -= plane_per*30
nu_planes[1] = rotate_plane(nu_planes[1],nu_planes[1][0], nu_planes[1][4],-2*angle*i/20)
nu_planes[2] = rotate_plane(nu_planes[2],nu_planes[2][1], nu_planes[2][0],-2*angle*i/20)
nu_planes[3] = rotate_plane(nu_planes[3],nu_planes[3][2], nu_planes[3][1],-2*angle*i/20)
nu_planes[4] = rotate_plane(nu_planes[4],nu_planes[4][4], nu_planes[4][3],-2*angle*i/20)
nu_planes[5] = rotate_plane(nu_planes[5],nu_planes[5][3], nu_planes[5][2],-2*angle*i/20)
render_solid_planes(nu_planes, draw, r_base, cut_back_face=False,
scale=720)
render_solid_planes(tt.planes[[0,1,2,3,5,6]], draw, r_base, cut_back_face=False,
scale=720)
writeStaggeredText(txt, draw, 2*i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 5.5 -f image2 -i im%d.png -vb 20M vid.avi
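# Editor's note: Scenes 16-18 fold the outer faces through multiples of the angle
# between adjacent dodecahedron faces. For a regular dodecahedron the dihedral angle
# is arccos(-1/sqrt(5)) ~ 116.57 degrees; whether angle_btw_planes returns this
# value or its supplement depends on its convention, which is not shown here.
print(np.degrees(np.arccos(-1 / np.sqrt(5))))   # 116.56505117707799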
'''
Scene 17
'''
angle = angle_btw_planes(tt.planes[0], tt.planes[1])
txt = "And then connect the two."
plane = np.copy(tt.planes[0])
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
for i in range(58):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(374-i)/90)
nu_planes = np.copy(tt.planes[[0,1,2,3,5,6]])
nu_planes -= plane_per*(30-i/3)
nu_planes[1] = rotate_plane(nu_planes[1],nu_planes[1][0], nu_planes[1][4],-2*angle)
nu_planes[2] = rotate_plane(nu_planes[2],nu_planes[2][1], nu_planes[2][0],-2*angle)
nu_planes[3] = rotate_plane(nu_planes[3],nu_planes[3][2], nu_planes[3][1],-2*angle)
nu_planes[4] = rotate_plane(nu_planes[4],nu_planes[4][4], nu_planes[4][3],-2*angle)
nu_planes[5] = rotate_plane(nu_planes[5],nu_planes[5][3], nu_planes[5][2],-2*angle)
render_solid_planes_back_first(nu_planes, draw, r_base, cut_back_face=False,
scale=720)
render_solid_planes_back_first(tt.planes[[0,1,2,3,5,6]], draw, r_base, cut_back_face=True,
scale=720)
writeStaggeredText(txt, draw, 2*i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 12 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 18
'''
angle = angle_btw_planes(tt.planes[0], tt.planes[1])
txt = "And then connect the two."
plane = np.copy(tt.planes[0])
plane_per = np.cross((plane[0]-plane[1]),(plane[1]-plane[2]))
plane_cen = np.mean(plane,axis=0)
for i in range(21):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(374-57)/90)
nu_planes = np.copy(tt.planes[[0,1,2,3,5,6]])
nu_planes -= plane_per*(30-57/3-float(i)/9.0)
nu_planes[0] = rotate_plane(nu_planes[0],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
nu_planes[1] = rotate_plane(nu_planes[1],nu_planes[1][0], nu_planes[1][4],-2*angle)
nu_planes[1] = rotate_plane(nu_planes[1],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
nu_planes[2] = rotate_plane(nu_planes[2],nu_planes[2][1], nu_planes[2][0],-2*angle)
nu_planes[2] = rotate_plane(nu_planes[2],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
nu_planes[3] = rotate_plane(nu_planes[3],nu_planes[3][2], nu_planes[3][1],-2*angle)
nu_planes[3] = rotate_plane(nu_planes[3],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
nu_planes[4] = rotate_plane(nu_planes[4],nu_planes[4][4], nu_planes[4][3],-2*angle)
nu_planes[4] = rotate_plane(nu_planes[4],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
nu_planes[5] = rotate_plane(nu_planes[5],nu_planes[5][3], nu_planes[5][2],-2*angle)
nu_planes[5] = rotate_plane(nu_planes[5],plane_cen, plane_cen+plane_per,i*np.pi/5/20)
render_solid_planes_back_first(nu_planes, draw, r_base, cut_back_face=False,
scale=720+i/20*(1200-720))
render_solid_planes_back_first(tt.planes[[0,1,2,3,5,6]], draw, r_base, cut_back_face=False,
scale=720+i/20*(1200-720))
writeStaggeredText(txt, draw, 23, speed=2)
im.save(basedir + "im" + str(i) + ".png")
for i in range(21,27):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r_base, shift=np.array([1000, 1000, 0]), scale=1200)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 19
'''
txt = "The Tetartoid can be\nconstructed in a similar manner\nbut here we will start with\na Tetrahedron."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*max((1-i/20),0)
tt = Tetartoid(s, t)
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(317+i)/90)
tt.render_solid_planes(draw, r_base, shift=np.array([1000, 1000, 0]), scale=1200)
writeStaggeredText(txt, draw, i, speed=3)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 7 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 20
'''
txt = "The Tetartoid can be\nconstructed in a similar manner\nbut here we will start with\na Tetrahedron."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*0
tt = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+i)/90)
#tt.render_solid_planes(draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=True,trnsp=int(255*(1-i/50)))
render_solid_planes(tt.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,trnsp=int(255*(1-i/50)))
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=int(255*(1-i/50)))
writeStaggeredText(txt, draw, 60, speed=3)
im.save(basedir + "im" + str(i) + ".png")
#ffmpeg -framerate 8 -f image2 -i im%d.png -vb 20M vid.avi
'''
Scene 21
'''
txt = "First, we take two points\non each side, at distance s\nfrom the end-points."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508*(i/50)
t = 0.0954913
tt = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
#render_solid_planes(tt.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for ed in tet.edges:
pt1 = ed[0]*s+ed[1]*(1-s)
pt2 = ed[1]*s+ed[0]*(1-s)
pt1 = np.dot(r_base.T,pt1)*1200+np.array([1000,1000,0])
pt2 = np.dot(r_base.T,pt2)*1200+np.array([1000,1000,0])
draw.ellipse((pt1[0]-5, pt1[1]-5, pt1[0]+5, pt1[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2[0]-5, pt2[1]-5, pt2[0]+5, pt2[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 22
'''
txt = "Consider the plane joining the\nedge to the center"
for i in range(31):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913
tt = Tetartoid(s, 0)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*min(i,27)/90)
r_base = np.dot(r_base, r_base_2)
#render_solid_planes(tt.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
p = 0.05 + i/30.0*0.95
cent_pt = (tet.vertices[0]+tet.vertices[3])/2*(1-p) + np.array([0,0,0])*p
#cent_pt = np.array([0,0,0])
face1 = np.array([cent_pt, tet.vertices[0], tet.vertices[3]])
render_solid_planes([face1], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=180)
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 23
'''
txt = "Move the two points\nperpendicular to the plane\na distance t on either side."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*max(0,27-i)/90)
r_base = np.dot(r_base, r_base_2)
#render_solid_planes(tt.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[3])/2*(1-p) + np.array([0,0,0])*p
#cent_pt = np.array([0,0,0])
face1 = np.array([cent_pt, tet.vertices[0], tet.vertices[3]])
render_solid_planes([face1], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=180)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
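# Editor's note: "move the two points perpendicular to the plane a distance t" is a
# unit-normal offset. A minimal sketch of that step in isolation (a, b, c are any
# three non-collinear points spanning the plane; illustrative only):
def offset_along_normal(p, a, b, c, t):
    n = np.cross(b - a, c - a)
    n = n / np.linalg.norm(n)            # unit normal of the plane through a, b, c
    return p + t * n, p - t * n          # the two mirror points on either side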
'''
Scene 24
'''
txt = "Similarly, move the points\nfor the bottom edge."
for i in range(35):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, 0.0954913)
tt2 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[2]), 2*np.pi*zigzag2(i,0,17,0)/90)
r_base = np.dot(r_base, r_base_2)
#render_solid_planes(tt2.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face1 = np.array([cent_pt, tet.vertices[0], tet.vertices[2]])
render_solid_planes([face1], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=180)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt2.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt2.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
###############
## And join the three closest red points - missing.
'''
Scene 26
'''
txt = "Join the left edge\nto center of\nfront face of Tetrahedron."
for i in range(41):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, 0.0954913)
tt2 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*min(i,13)/90)
r_base = np.dot(r_base, r_base_2)
face1 = tet.planes[1]
face1_cen = sum(tet.planes[1])/3
p1 = min(1,(i+3)/34)
p2 = min(1,(i+1)/34)
face1_1 = face1*p1 + face1_cen*(1-p1)
face1_2 = face1*p2 + face1_cen*(1-p2)
render_solid_planes([face1_1], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,trnsp=70,cut_back_face=False)
render_solid_planes([face1_2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,trnsp=70,h=0,s=0,cut_back_face=False,rgb=(0,0,0))
render_solid_planes([tet.planes[0]], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
#render_solid_planes(tt2.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.qs[0,2]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
face_cent = (tet.vertices[0]+tet.vertices[2]+tet.vertices[3])/3
face3 = [tt1.qs[3,0], tt1.qs[0,2], face_cent]
render_solid_planes([face3], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,cut_back_face=False,trnsp=int(200*i/34))
p = min(1,i/34)
for i1 in range(3):
pt1 = np.dot(r_base.T,face3[i1])*1200+np.array([1000,1000,0])
pt2 = np.dot(r_base.T,face3[(i1+1)%3])*1200+np.array([1000,1000,0])
pt2 = p*pt2+(1-p)*pt1
draw.line((pt1[0],pt1[1],pt2[0],pt2[1]), fill = (0,0,255), width = 5)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 27
'''
txt = "Extend a line from body center to face center\nuntil the two planes become one."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, 0.0954913)
tt2 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*13/90)
r_base = np.dot(r_base, r_base_2)
render_solid_planes([tet.planes[0]], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.qs[0,2]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=(i<30),cut_back_face=False,trnsp=int(200*34/34))
face_cent = (tet.vertices[0]+tet.vertices[2]+tet.vertices[3])/3
p = min(1,i/30)
face_cent = face_cent*(1-p) + tt1.cs[1]*p
face3 = [tt1.qs[3,0], tt1.qs[0,2], face_cent]
render_solid_planes([face3], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=(i<30),cut_back_face=False,trnsp=int(200*(0.5+0.5*min(i/30,1))))
pt = np.dot(r_base.T,face_cent)*1200+np.array([1000,1000,0])
draw.line((1000,1000,pt[0],pt[1]), fill = (255,255,255), width = 5)
draw.ellipse((1000-10,1000-10,1000+10,1000+10),fill='purple', outline='blue')
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 28
'''
txt = "And do the same thing with the bottom vertex."
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, 0.0954913)
tt2 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*13/90)
r_base = np.dot(r_base, r_base_2)
render_solid_planes([tet.planes[0]], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,cut_back_face=False,trnsp=int(200*34/34))
vert = tet.vertices[0]
p = min(1,i/40)
vert = vert*(1-p) + tt1.vs[0]*p
face3 = [tt1.qs[0,3], tt1.qs[0,2], vert]
render_solid_planes([face3], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=(i<30),cut_back_face=False,trnsp=int(200*(0.5+0.5*min(i/30,1))))
pt = np.dot(r_base.T,vert)*1200+np.array([1000,1000,0])
draw.line((1000,1000,pt[0],pt[1]), fill = (255,255,255), width = 5)
draw.ellipse((1000-10,1000-10,1000+10,1000+10),fill='purple', outline='blue')
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 29
'''
txt = "The pentagon here is regular."
for i in range(21):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*min(1,i/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, 0.0954913)
tt2 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*(13-min(i,11))/90)
r_base = np.dot(r_base, r_base_2)
render_solid_planes([tet.planes[0]], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = 0.404\nt = 0.095", draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
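# Editor's note: "the pentagon here is regular" can be checked numerically. A hedged
# sketch using the same five vertices assembled as face2 above, assuming they are
# listed in boundary order (as the rendering suggests); tt1 survives the loop:
verts = np.array([tt1.qs[0, 3], tt1.qs[3, 0], tt1.cs[1], tt1.qs[0, 2], tt1.vs[0]])
sides = [np.linalg.norm(verts[k] - verts[(k + 1) % 5]) for k in range(5)]
print(np.allclose(sides, sides[0], rtol=1e-3))   # expected True, per the caption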
'''
Scene 30
'''
txt = "But other values of s and t\ngive a wide range of pentagons."
for i in range(61):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
if i < 40:
s = 0.404508*zigzag2(i,100,120,70)/100.0
else:
s = 0.404508
if i < 40:
t = 0.0954913
else:
t = 0.0954913*(1-(i-40)/30)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50)/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*(13-11)/90)
r_base = np.dot(r_base, r_base_2)
render_solid_planes([tet.planes[0]], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 31
'''
txt = "We could construct the other faces\nin a similar manner\nbut lets use rotational symmetry."
for i in range(63):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913/3
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(367+50-min(i,57) )/90)
r_base_2 = general_rotation(np.dot(r_base.T,tet.vertices[3]), 2*np.pi*(2-min(i/10,2))/90)
#rot = tetrahedral_rotations(i/40)[0]
r_base_old = np.dot(r_base, r_base_2)
r_base = r_base_old
#r_base = np.dot(r_base_old, rot)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
#render_solid_planes([face2], draw, r_base_old, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 32
'''
txt = "Certain rotations preserve the orientation\nof the Tetrahedron."
for i in range(26):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913/3
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
rot = tetrahedral_rotations(min(1,i/20))[0]
r_base_old = np.eye(3)
r_base = np.dot(r_base_old, rot)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
#render_solid_planes([face2], draw, r_base_old, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
#writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 33
'''
txt = "Certain rotations preserve the orientation\nof the Tetrahedron."
for i in range(26):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913/3
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
rot = tetrahedral_rotations(max(0,1-i/20))[0]
r_base_old = np.eye(3)
r_base = np.dot(r_base_old, rot)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
#render_solid_planes([face2], draw, r_base_old, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, 100, speed=2)
#writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 34
'''
txt = "If we duplicate the pentagons\nwe start getting the other\nfaces of the Tetartoid."
for j in range(11):
for i in range(11):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913/3
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
rot = tetrahedral_rotations(min(1,i/10))[j]
r_base_old = np.eye(3)
r_base = np.dot(r_base_old, rot)
#render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
# scale=1200, make_edges=False,trnsp=125)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
p = 1.0
cent_pt = (tet.vertices[0]+tet.vertices[2])/2*(1-p) + np.array([0,0,0])*p
face2 = [tt1.qs[0,3], tt1.qs[3,0], tt1.cs[1], tt1.qs[0,2], tt1.vs[0]]
render_solid_planes([face2], draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=int(200*34/34))
render_solid_planes([face2], draw, r_base_old, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=200)
for k in range(j):
rot1 = tetrahedral_rotations()[k]
render_solid_planes([face2], draw, rot1, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,cut_back_face=False,trnsp=200)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, j*11+i, speed=2)
im.save(basedir + "im" + str(j*11+i) + ".png")
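# Editor's note: tetrahedral_rotations supplies the rotations used to copy one face
# around the solid. The rotation group of the tetrahedron has 12 elements: the
# identity, +/-120 degrees about each of the 4 vertex axes, and 180 degrees about
# the 3 edge-midpoint axes. A self-contained sketch of that standard construction
# (not pyray's implementation):
def rodrigues(axis, theta):
    d = axis / np.linalg.norm(axis)
    K = np.array([[0, -d[2], d[1]], [d[2], 0, -d[0]], [-d[1], d[0], 0]])
    return np.eye(3) + np.sin(theta) * K + (1 - np.cos(theta)) * (K @ K)

tet_verts = np.array([[1, 1, 1], [1, -1, -1], [-1, 1, -1], [-1, -1, 1]])
rots = [np.eye(3)]
rots += [rodrigues(v, sgn * 2 * np.pi / 3) for v in tet_verts for sgn in (1, -1)]
rots += [rodrigues(np.array(ax, dtype=float), np.pi)
         for ax in ([1, 0, 0], [0, 1, 0], [0, 0, 1])]
print(len(rots))   # 12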
'''
Scene 35
'''
txt = "We already saw how t=0\ngives back the Tetrahedron."
for i in range(35):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913/3*(1-min(i,20)/20)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(tet.vertices[3], 2*np.pi*(i+30)/90)
render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,trnsp=200)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 36
'''
txt = "And these special values\ngive us the Dodecahedron."
for i in range(35):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.404508
t = 0.0954913*(min(i,20)/20)
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(tet.vertices[3], 2*np.pi*(i+30+27)/90)
render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=False,trnsp=200)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
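# Numerically, the special values above are golden-ratio expressions:
# 0.404508 ~= phi/4 and 0.0954913 ~= 1/(4*phi**2), with phi = (1+5**0.5)/2.
# (An observation about the constants used here, not a claim about the
# Tetartoid parametrization itself.)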
'''
Scene 37
'''
txt = "s = 0.33 and t = 0.33 gives us the cube."
for i in range(45):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
p = min(1,i/30)
s = 0.404508*(1-p)+0.33*p
t = 0.0954913*(1-p)+0.33*p
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(tet.vertices[3], 2*np.pi*(i+30+27+34)/90)
render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=200)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
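# Scene 37 blends (s, t) linearly between the dodecahedron values and the
# cube values. A named helper (a sketch, not in the original) makes that
# intent explicit:
def lerp(a, b, p):
    """Linear interpolation: returns a at p=0 and b at p=1."""
    return a * (1 - p) + b * p
# e.g. s = lerp(0.404508, 0.33, p); t = lerp(0.0954913, 0.33, p)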
'''
Scene 38
'''
txt = "Stays a cube"
for i in range(45):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = zigzag2(i,33,45,20)/100
t = zigzag2(i,33,45,20)/100
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(tet.vertices[3], 2*np.pi*(i+30+27+34+34+44)/90)
render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=200)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
'''
Scene 39
'''
txt = "Subscribe for more :)"
for i in range(45):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = zigzag2(i,33,45,20)/100
t = zigzag2(-i,33,45,20)/100
tt = Tetartoid(s, 0)
tt1 = Tetartoid(s, t)
tet = Tetrahedron()
r_base = general_rotation(tet.vertices[3], 2*np.pi*(i+30+27+34+34+44+44)/90)
render_solid_planes(tt1.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=200)
render_solid_planes(tet.planes, draw, r_base, shift=np.array([1000, 1000, 0]),\
scale=1200, make_edges=True,trnsp=0)
for i1 in range(4):
for j1 in range(i1+1,4):
if i1 == 0 and j1 == 3:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
if i1 == 0 and j1 == 2:
pt1_fin = np.dot(r_base.T,tt1.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt1.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='red', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='red', outline='yellow')
pt1_fin = np.dot(r_base.T,tt.qs[i1,j1])*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,tt.qs[j1,i1])*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
writeStaggeredText(txt, draw, i, speed=2)
#writeStaggeredText("s = " +"%.4f" % round(s,4)+ "\nt = "+ "%.4f" % round(t,4), draw, 100, speed=2,pos=(50,1000))
im.save(basedir + "im" + str(i) + ".png")
# Convert to video and gif.
#ffmpeg -framerate 10 -f image2 -i im%d.png -vb 20M vid.avi
#ffmpeg -i vid.avi -pix_fmt rgb24 -loop 0 out.gif
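# A minimal Python wrapper around the two ffmpeg invocations above (a sketch;
# assumes ffmpeg is on PATH and reuses the basedir defined earlier):
import subprocess
def frames_to_gif(framerate=10):
    subprocess.run(["ffmpeg", "-framerate", str(framerate), "-f", "image2",
                    "-i", basedir + "im%d.png", "-vb", "20M",
                    basedir + "vid.avi"], check=True)
    subprocess.run(["ffmpeg", "-i", basedir + "vid.avi", "-pix_fmt", "rgb24",
                    "-loop", "0", basedir + "out.gif"], check=True)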
for i in range(51):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
s = 0.4
t = 0.33
tt = Tetartoid(s, t)
r_base = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*(317+i)/90)
tt.render_solid_planes(draw, r_base, shift=np.array([1000, 1000, 0]), scale=1200)
im.save(basedir + "im" + str(i) + ".png")
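# Note: the loop below runs after the final frame has already been saved, so
# it marks the s-division points of the tetrahedron edges on the last
# in-memory image only; it would need to run before im.save for the markers
# to appear in the output frames.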
for ed in tet.edges:
pt1 = ed[0]*s+ed[1]*(1-s)
pt2 = ed[1]*s+ed[0]*(1-s)
pt1_fin = np.dot(r_base.T,pt1)*1200+np.array([1000,1000,0])
pt2_fin = np.dot(r_base.T,pt2)*1200+np.array([1000,1000,0])
draw.ellipse((pt1_fin[0]-5, pt1_fin[1]-5, pt1_fin[0]+5, pt1_fin[1]+5),fill='yellow', outline='yellow')
draw.ellipse((pt2_fin[0]-5, pt2_fin[1]-5, pt2_fin[0]+5, pt2_fin[1]+5),fill='yellow', outline='yellow')
avg_line_length: 48.702128 | max_line_length: 119 | alphanum_fraction: 0.596852
[remaining per-row quality-signal columns: numeric noise with no recoverable alignment, elided]

hexsha: 53724a07be899a1c7cb6dbe765d6f23ee6e0f63a | size: 76,616 | ext: py | lang: Python
max_stars_repo_path: sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
max_stars_repo_name: tzhanl/azure-sdk-for-python | max_stars_repo_head_hexsha: 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | stars events: 2019-02-21T20:46:26.000Z to 2021-06-22T15:35:52.000Z
max_issues_repo_path/name/head_hexsha: (same as above) | max_issues_repo_licenses: ["MIT"] | max_issues_count: 11 | issues events: 2017-10-05T18:20:40.000Z to 2020-10-10T09:20:19.000Z
max_forks_repo_path/name/head_hexsha: (same as above) | max_forks_repo_licenses: ["MIT"] | max_forks_count: 15 | forks events: 2017-10-02T18:48:20.000Z to 2022-03-03T14:03:49.000Z
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import map_error
from .. import models
class PageBlobOperations(object):
"""PageBlobOperations operations.
You should not instantiate this class directly; instead, create a Client instance, which will create it for you and attach it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar x_ms_blob_type: Specifies the type of blob to create: block blob, page blob, or append blob. Constant value: "PageBlob".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
self.x_ms_blob_type = "PageBlob"
def create(self, content_length, blob_content_length, timeout=None, tier=None, metadata=None, blob_sequence_number=0, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Create operation creates a new page blob.
:param content_length: The length of the request.
:type content_length: long
:param blob_content_length: This header specifies the maximum size for
the page blob, up to 1 TB. The page blob size must be aligned to a
512-byte boundary.
:type blob_content_length: long
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param tier: Optional. Indicates the tier to be set on the page blob.
Possible values include: 'P4', 'P6', 'P10', 'P15', 'P20', 'P30',
'P40', 'P50', 'P60', 'P70', 'P80'
:type tier: str or
~azure.storage.blob.models.PremiumPageBlobAccessTier
:param metadata: Optional. Specifies a user-defined name-value pair
associated with the blob. If no name-value pairs are specified, the
operation will copy the metadata from the source blob or file to the
destination blob. If one or more name-value pairs are specified, the
destination blob is created with the specified metadata, and metadata
is not copied from the source blob or file. Note that beginning with
version 2009-09-19, metadata names must adhere to the naming rules for
C# identifiers. See Naming and Referencing Containers, Blobs, and
Metadata for more information.
:type metadata: str
:param blob_sequence_number: Set for page blobs only. The sequence
number is a user-controlled value that you can use to track requests.
The value of the sequence number must be between 0 and 2^63 - 1.
:type blob_sequence_number: long
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Additional parameters for the operation
:type cpk_info: ~azure.storage.blob.models.CpkInfo
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
blob_content_type = None
if blob_http_headers is not None:
blob_content_type = blob_http_headers.blob_content_type
blob_content_encoding = None
if blob_http_headers is not None:
blob_content_encoding = blob_http_headers.blob_content_encoding
blob_content_language = None
if blob_http_headers is not None:
blob_content_language = blob_http_headers.blob_content_language
blob_content_md5 = None
if blob_http_headers is not None:
blob_content_md5 = blob_http_headers.blob_content_md5
blob_cache_control = None
if blob_http_headers is not None:
blob_cache_control = blob_http_headers.blob_cache_control
blob_content_disposition = None
if blob_http_headers is not None:
blob_content_disposition = blob_http_headers.blob_content_disposition
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
encryption_key = None
if cpk_info is not None:
encryption_key = cpk_info.encryption_key
encryption_key_sha256 = None
if cpk_info is not None:
encryption_key_sha256 = cpk_info.encryption_key_sha256
encryption_algorithm = None
if cpk_info is not None:
encryption_algorithm = cpk_info.encryption_algorithm
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
if tier is not None:
header_parameters['x-ms-access-tier'] = self._serialize.header("tier", tier, 'str')
if metadata is not None:
header_parameters['x-ms-meta'] = self._serialize.header("metadata", metadata, 'str')
header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long')
if blob_sequence_number is not None:
header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
if blob_content_encoding is not None:
header_parameters['x-ms-blob-content-encoding'] = self._serialize.header("blob_content_encoding", blob_content_encoding, 'str')
if blob_content_language is not None:
header_parameters['x-ms-blob-content-language'] = self._serialize.header("blob_content_language", blob_content_language, 'str')
if blob_content_md5 is not None:
header_parameters['x-ms-blob-content-md5'] = self._serialize.header("blob_content_md5", blob_content_md5, 'bytearray')
if blob_cache_control is not None:
header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", blob_cache_control, 'str')
if blob_content_disposition is not None:
header_parameters['x-ms-blob-content-disposition'] = self._serialize.header("blob_content_disposition", blob_content_disposition, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if encryption_key is not None:
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", encryption_key, 'str')
if encryption_key_sha256 is not None:
header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", encryption_key_sha256, 'str')
if encryption_algorithm is not None:
header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'EncryptionAlgorithmType')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Content-MD5': self._deserialize('bytearray', response.headers.get('Content-MD5')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
'x-ms-encryption-key-sha256': self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
create.metadata = {'url': '/{containerName}/{blob}'}
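# Illustrative usage sketch (not part of the generated file): `page_blob` is
# a hypothetical, already-wired PageBlobOperations instance. Creating an
# empty 1 MiB page blob would look like:
#
#     page_blob.create(content_length=0, blob_content_length=1024 * 1024)
#
# blob_content_length must be 512-byte aligned, per the docstring above.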
def upload_pages(self, body, content_length, transactional_content_md5=None, transactional_content_crc64=None, timeout=None, range=None, request_id=None, lease_access_conditions=None, cpk_info=None, sequence_number_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Upload Pages operation writes a range of pages to a page blob.
:param body: Initial data
:type body: Generator
:param content_length: The length of the request.
:type content_length: long
:param transactional_content_md5: Specify the transactional md5 for
the body, to be validated by the service.
:type transactional_content_md5: bytearray
:param transactional_content_crc64: Specify the transactional crc64
for the body, to be validated by the service.
:type transactional_content_crc64: bytearray
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param range: Return only the bytes of the blob in the specified
range.
:type range: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Additional parameters for the operation
:type cpk_info: ~azure.storage.blob.models.CpkInfo
:param sequence_number_access_conditions: Additional parameters for
the operation
:type sequence_number_access_conditions:
~azure.storage.blob.models.SequenceNumberAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
encryption_key = None
if cpk_info is not None:
encryption_key = cpk_info.encryption_key
encryption_key_sha256 = None
if cpk_info is not None:
encryption_key_sha256 = cpk_info.encryption_key_sha256
encryption_algorithm = None
if cpk_info is not None:
encryption_algorithm = cpk_info.encryption_algorithm
if_sequence_number_less_than_or_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
if_sequence_number_less_than = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
if_sequence_number_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "page"
page_write = "update"
# Construct URL
url = self.upload_pages.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/octet-stream'
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
if transactional_content_md5 is not None:
header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray')
if transactional_content_crc64 is not None:
header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
if range is not None:
header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if encryption_key is not None:
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", encryption_key, 'str')
if encryption_key_sha256 is not None:
header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", encryption_key_sha256, 'str')
if encryption_algorithm is not None:
header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'EncryptionAlgorithmType')
if if_sequence_number_less_than_or_equal_to is not None:
header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long')
if if_sequence_number_less_than is not None:
header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long')
if if_sequence_number_equal_to is not None:
header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct body
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, stream_content=body)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Content-MD5': self._deserialize('bytearray', response.headers.get('Content-MD5')),
'x-ms-content-crc64': self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')),
'x-ms-blob-sequence-number': self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
'x-ms-encryption-key-sha256': self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
upload_pages.metadata = {'url': '/{containerName}/{blob}'}
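# Illustrative sketch (hypothetical `page_blob` instance): writing the first
# 4 KiB as one page range from an in-memory payload; the range string uses
# the "bytes=start-end" form and must be 512-byte aligned.
#
#     data = b"\x00" * 4096
#     page_blob.upload_pages(body=iter([data]), content_length=4096,
#                            range="bytes=0-4095")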
def clear_pages(self, content_length, timeout=None, range=None, request_id=None, lease_access_conditions=None, cpk_info=None, sequence_number_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Clear Pages operation clears a set of pages from a page blob.
:param content_length: The length of the request.
:type content_length: long
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param range: Return only the bytes of the blob in the specified
range.
:type range: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Additional parameters for the operation
:type cpk_info: ~azure.storage.blob.models.CpkInfo
:param sequence_number_access_conditions: Additional parameters for
the operation
:type sequence_number_access_conditions:
~azure.storage.blob.models.SequenceNumberAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
encryption_key = None
if cpk_info is not None:
encryption_key = cpk_info.encryption_key
encryption_key_sha256 = None
if cpk_info is not None:
encryption_key_sha256 = cpk_info.encryption_key_sha256
encryption_algorithm = None
if cpk_info is not None:
encryption_algorithm = cpk_info.encryption_algorithm
if_sequence_number_less_than_or_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
if_sequence_number_less_than = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
if_sequence_number_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "page"
page_write = "clear"
# Construct URL
url = self.clear_pages.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
if range is not None:
header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if encryption_key is not None:
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", encryption_key, 'str')
if encryption_key_sha256 is not None:
header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", encryption_key_sha256, 'str')
if encryption_algorithm is not None:
header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'EncryptionAlgorithmType')
if if_sequence_number_less_than_or_equal_to is not None:
header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long')
if if_sequence_number_less_than is not None:
header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long')
if if_sequence_number_equal_to is not None:
header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Content-MD5': self._deserialize('bytearray', response.headers.get('Content-MD5')),
'x-ms-content-crc64': self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')),
'x-ms-blob-sequence-number': self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
clear_pages.metadata = {'url': '/{containerName}/{blob}'}
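# Illustrative sketch (hypothetical `page_blob` instance): clearing the same
# 4 KiB range. content_length is 0 because no request body is sent.
#
#     page_blob.clear_pages(content_length=0, range="bytes=0-4095")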
def upload_pages_from_url(self, source_url, source_range, content_length, range, source_content_md5=None, source_contentcrc64=None, timeout=None, request_id=None, cpk_info=None, lease_access_conditions=None, sequence_number_access_conditions=None, modified_access_conditions=None, source_modified_access_conditions=None, cls=None, **kwargs):
"""The Upload Pages operation writes a range of pages to a page blob where
the contents are read from a URL.
:param source_url: Specify a URL to the copy source.
:type source_url: str
:param source_range: Bytes of source data in the specified range. The
length of this range should match the ContentLength header and
x-ms-range/Range destination range header.
:type source_range: str
:param content_length: The length of the request.
:type content_length: long
:param range: The range of bytes to which the source range would be
written. The range should be 512-byte aligned, and the range end is required.
:type range: str
:param source_content_md5: Specify the md5 calculated for the range of
bytes that must be read from the copy source.
:type source_content_md5: bytearray
:param source_contentcrc64: Specify the crc64 calculated for the range
of bytes that must be read from the copy source.
:type source_contentcrc64: bytearray
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param cpk_info: Additional parameters for the operation
:type cpk_info: ~azure.storage.blob.models.CpkInfo
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param sequence_number_access_conditions: Additional parameters for
the operation
:type sequence_number_access_conditions:
~azure.storage.blob.models.SequenceNumberAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
~azure.storage.blob.models.SourceModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
encryption_key = None
if cpk_info is not None:
encryption_key = cpk_info.encryption_key
encryption_key_sha256 = None
if cpk_info is not None:
encryption_key_sha256 = cpk_info.encryption_key_sha256
encryption_algorithm = None
if cpk_info is not None:
encryption_algorithm = cpk_info.encryption_algorithm
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_sequence_number_less_than_or_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to
if_sequence_number_less_than = None
if sequence_number_access_conditions is not None:
if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than
if_sequence_number_equal_to = None
if sequence_number_access_conditions is not None:
if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
source_if_modified_since = None
if source_modified_access_conditions is not None:
source_if_modified_since = source_modified_access_conditions.source_if_modified_since
source_if_unmodified_since = None
if source_modified_access_conditions is not None:
source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
source_if_match = None
if source_modified_access_conditions is not None:
source_if_match = source_modified_access_conditions.source_if_match
source_if_none_match = None
if source_modified_access_conditions is not None:
source_if_none_match = source_modified_access_conditions.source_if_none_match
comp = "page"
page_write = "update"
# Construct URL
url = self.upload_pages_from_url.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['x-ms-copy-source'] = self._serialize.header("source_url", source_url, 'str')
header_parameters['x-ms-source-range'] = self._serialize.header("source_range", source_range, 'str')
if source_content_md5 is not None:
header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
if source_contentcrc64 is not None:
header_parameters['x-ms-source-content-crc64'] = self._serialize.header("source_contentcrc64", source_contentcrc64, 'bytearray')
header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long')
header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
header_parameters['x-ms-page-write'] = self._serialize.header("page_write", page_write, 'str')
if encryption_key is not None:
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", encryption_key, 'str')
if encryption_key_sha256 is not None:
header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", encryption_key_sha256, 'str')
if encryption_algorithm is not None:
header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'EncryptionAlgorithmType')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_sequence_number_less_than_or_equal_to is not None:
header_parameters['x-ms-if-sequence-number-le'] = self._serialize.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long')
if if_sequence_number_less_than is not None:
header_parameters['x-ms-if-sequence-number-lt'] = self._serialize.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long')
if if_sequence_number_equal_to is not None:
header_parameters['x-ms-if-sequence-number-eq'] = self._serialize.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
if source_if_unmodified_since is not None:
header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123')
if source_if_match is not None:
header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", source_if_match, 'str')
if source_if_none_match is not None:
header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", source_if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'Content-MD5': self._deserialize('bytearray', response.headers.get('Content-MD5')),
'x-ms-content-crc64': self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')),
'x-ms-blob-sequence-number': self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
'x-ms-encryption-key-sha256': self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
upload_pages_from_url.metadata = {'url': '/{containerName}/{blob}'}
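# Illustrative sketch (hypothetical `page_blob` instance and source URL):
# a server-side copy of one 512-byte page from a readable source blob.
# content_length is 0 because the data is read from source_url, not the body.
#
#     page_blob.upload_pages_from_url(
#         source_url="https://src.blob.core.windows.net/c/b?<sas>",
#         source_range="bytes=0-511", content_length=0, range="bytes=0-511")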
def get_page_ranges(self, snapshot=None, timeout=None, range=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Get Page Ranges operation returns the list of valid page ranges for
a page blob or snapshot of a page blob.
:param snapshot: The snapshot parameter is an opaque DateTime value
that, when present, specifies the blob snapshot to retrieve. For more
information on working with blob snapshots, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/creating-a-snapshot-of-a-blob">Creating
a Snapshot of a Blob.</a>
:type snapshot: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param range: Return only the bytes of the blob in the specified
range.
:type range: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: PageList or the result of cls(response)
:rtype: ~azure.storage.blob.models.PageList
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "pagelist"
# Construct URL
url = self.get_page_ranges.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if snapshot is not None:
query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/xml'
if range is not None:
header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PageList', response)
header_dict = {
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'x-ms-blob-content-length': self._deserialize('long', response.headers.get('x-ms-blob-content-length')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
get_page_ranges.metadata = {'url': '/{containerName}/{blob}'}
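# Illustrative sketch (hypothetical `page_blob` instance): listing the valid
# ranges in the first 1 MiB and iterating the returned PageList model
# (attribute names follow the generated models; treat this as illustrative).
#
#     page_list = page_blob.get_page_ranges(range="bytes=0-1048575")
#     for pr in page_list.page_range:
#         print(pr.start, pr.end)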
def get_page_ranges_diff(self, snapshot=None, timeout=None, prevsnapshot=None, range=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Get Page Ranges Diff operation returns the list of valid page
ranges for a page blob that were changed between the target blob and a
previous snapshot.
:param snapshot: The snapshot parameter is an opaque DateTime value
that, when present, specifies the blob snapshot to retrieve. For more
information on working with blob snapshots, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/creating-a-snapshot-of-a-blob">Creating
a Snapshot of a Blob.</a>
:type snapshot: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param prevsnapshot: Optional in version 2015-07-08 and newer. The
prevsnapshot parameter is a DateTime value that specifies that the
response will contain only pages that were changed between target blob
and previous snapshot. Changed pages include both updated and cleared
pages. The target blob may be a snapshot, as long as the snapshot
specified by prevsnapshot is the older of the two. Note that
incremental snapshots are currently supported only for blobs created
on or after January 1, 2016.
:type prevsnapshot: str
:param range: Return only the bytes of the blob in the specified
range.
:type range: str
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: PageList or the result of cls(response)
:rtype: ~azure.storage.blob.models.PageList
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "pagelist"
# Construct URL
url = self.get_page_ranges_diff.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if snapshot is not None:
query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
if prevsnapshot is not None:
query_parameters['prevsnapshot'] = self._serialize.query("prevsnapshot", prevsnapshot, 'str')
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/xml'
if range is not None:
header_parameters['x-ms-range'] = self._serialize.header("range", range, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PageList', response)
header_dict = {
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'ETag': self._deserialize('str', response.headers.get('ETag')),
'x-ms-blob-content-length': self._deserialize('long', response.headers.get('x-ms-blob-content-length')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
get_page_ranges_diff.metadata = {'url': '/{containerName}/{blob}'}
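# --- Editor's sketch (illustrative, not part of the generated client) ---
# Fetching the page ranges that changed since an earlier snapshot, assuming
# `page_blob` is a configured PageBlobOperations instance; the snapshot value
# and byte range below are hypothetical. PageList is assumed to expose
# `page_range`/`clear_range` lists as in azure.storage.blob.models:
#
#     diff = page_blob.get_page_ranges_diff(
#         prevsnapshot="2020-01-01T00:00:00.0000000Z",
#         range="bytes=0-1048575",
#     )
#     for pr in diff.page_range or []:
#         print(pr.start, pr.end)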
def resize(self, blob_content_length, timeout=None, request_id=None, lease_access_conditions=None, cpk_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""Resize the Blob.
:param blob_content_length: This header specifies the maximum size for
the page blob, up to 1 TB. The page blob size must be aligned to a
512-byte boundary.
:type blob_content_length: long
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param cpk_info: Additional parameters for the operation
:type cpk_info: ~azure.storage.blob.models.CpkInfo
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
encryption_key = None
if cpk_info is not None:
encryption_key = cpk_info.encryption_key
encryption_key_sha256 = None
if cpk_info is not None:
encryption_key_sha256 = cpk_info.encryption_key_sha256
encryption_algorithm = None
if cpk_info is not None:
encryption_algorithm = cpk_info.encryption_algorithm
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "properties"
# Construct URL
url = self.resize.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['x-ms-blob-content-length'] = self._serialize.header("blob_content_length", blob_content_length, 'long')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if encryption_key is not None:
header_parameters['x-ms-encryption-key'] = self._serialize.header("encryption_key", encryption_key, 'str')
if encryption_key_sha256 is not None:
header_parameters['x-ms-encryption-key-sha256'] = self._serialize.header("encryption_key_sha256", encryption_key_sha256, 'str')
if encryption_algorithm is not None:
header_parameters['x-ms-encryption-algorithm'] = self._serialize.header("encryption_algorithm", encryption_algorithm, 'EncryptionAlgorithmType')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-blob-sequence-number': self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
resize.metadata = {'url': '/{containerName}/{blob}'}
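# Editor's sketch: resizing a page blob. Per the docstring above, the new
# length must sit on a 512-byte boundary, so a desired size is rounded up
# first (`page_blob` is an assumed, configured PageBlobOperations instance):
#
#     desired = 1000
#     aligned = (desired + 511) // 512 * 512   # -> 1024
#     page_blob.resize(blob_content_length=aligned)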
def update_sequence_number(self, sequence_number_action, timeout=None, blob_sequence_number=0, request_id=None, lease_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""Update the sequence number of the blob.
:param sequence_number_action: Required if the
x-ms-blob-sequence-number header is set for the request. This property
applies to page blobs only. This property indicates how the service
should modify the blob's sequence number. Possible values include:
'max', 'update', 'increment'
:type sequence_number_action: str or
~azure.storage.blob.models.SequenceNumberActionType
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param blob_sequence_number: Set for page blobs only. The sequence
number is a user-controlled value that you can use to track requests.
The value of the sequence number must be between 0 and 2^63 - 1.
:type blob_sequence_number: long
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
~azure.storage.blob.models.LeaseAccessConditions
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
lease_id = None
if lease_access_conditions is not None:
lease_id = lease_access_conditions.lease_id
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "properties"
# Construct URL
url = self.update_sequence_number.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['x-ms-sequence-number-action'] = self._serialize.header("sequence_number_action", sequence_number_action, 'SequenceNumberActionType')
if blob_sequence_number is not None:
header_parameters['x-ms-blob-sequence-number'] = self._serialize.header("blob_sequence_number", blob_sequence_number, 'long')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if lease_id is not None:
header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-blob-sequence-number': self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
update_sequence_number.metadata = {'url': '/{containerName}/{blob}'}
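# Editor's sketch: pinning the sequence number explicitly. 'update' and 'max'
# consume the x-ms-blob-sequence-number header, while 'increment' bumps the
# current value without it. The value must lie in [0, 2**63 - 1]:
#
#     page_blob.update_sequence_number(
#         sequence_number_action="update",
#         blob_sequence_number=7,
#     )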
def copy_incremental(self, copy_source, timeout=None, request_id=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Copy Incremental operation copies a snapshot of the source page
blob to a destination page blob. The snapshot is copied such that only
the differential changes between the previously copied snapshot are
transferred to the destination. The copied snapshots are complete
copies of the original snapshot and can be read or copied from as
usual. This API is supported since REST version 2016-05-31.
:param copy_source: Specifies the name of the source page blob
snapshot. This value is a URL of up to 2 KB in length that specifies a
page blob snapshot. The value should be URL-encoded as it would appear
in a request URI. The source blob must either be public or must be
authenticated via a shared access signature.
:type copy_source: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see <a
href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting
Timeouts for Blob Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param modified_access_conditions: Additional parameters for the
operation
:type modified_access_conditions:
~azure.storage.blob.models.ModifiedAccessConditions
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<azure.storage.blob.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
if_modified_since = None
if modified_access_conditions is not None:
if_modified_since = modified_access_conditions.if_modified_since
if_unmodified_since = None
if modified_access_conditions is not None:
if_unmodified_since = modified_access_conditions.if_unmodified_since
if_match = None
if modified_access_conditions is not None:
if_match = modified_access_conditions.if_match
if_none_match = None
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
comp = "incrementalcopy"
# Construct URL
url = self.copy_incremental.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
header_parameters['x-ms-copy-source'] = self._serialize.header("copy_source", copy_source, 'str')
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'ETag': self._deserialize('str', response.headers.get('ETag')),
'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-copy-id': self._deserialize('str', response.headers.get('x-ms-copy-id')),
'x-ms-copy-status': self._deserialize(models.CopyStatusType, response.headers.get('x-ms-copy-status')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
copy_incremental.metadata = {'url': '/{containerName}/{blob}'}
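# Editor's sketch: copy_incremental returns 202 Accepted and the copy finishes
# asynchronously; its state is surfaced in the x-ms-copy-status header. The
# `cls` callback (invoked as cls(response, deserialized, headers) above) can
# capture those headers; names below are the caller's own:
#
#     def keep_headers(response, deserialized, headers):
#         return headers
#
#     headers = page_blob.copy_incremental(
#         copy_source="https://account.blob.core.windows.net/src/blob?snapshot=...",
#         cls=keep_headers,
#     )
#     print(headers["x-ms-copy-id"], headers["x-ms-copy-status"])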
| 58.799693
| 345
| 0.684753
| 9,588
| 76,616
| 5.226429
| 0.041093
| 0.011614
| 0.033945
| 0.029634
| 0.899184
| 0.875836
| 0.863902
| 0.855501
| 0.85137
| 0.839636
| 0
| 0.00793
| 0.218231
| 76,616
| 1,302
| 346
| 58.844854
| 0.828703
| 0.244923
| 0
| 0.846056
| 0
| 0
| 0.157546
| 0.055961
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012723
| false
| 0
| 0.002545
| 0
| 0.031807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53759fa8764833cdbce6a3235fd950f83986e2b1
| 37,052
|
py
|
Python
|
tools/fileinfo/features/typerefs/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2017-12-14T14:25:17.000Z
|
2019-03-09T03:29:12.000Z
|
tools/fileinfo/features/typerefs/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 10
|
2019-06-14T09:12:55.000Z
|
2021-10-01T12:15:43.000Z
|
tools/fileinfo/features/typerefs/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2019-05-10T14:59:48.000Z
|
2022-03-07T16:34:23.000Z
|
from regression_tests import *
class TestTypeRefNested(Test):
settings = TestSettings(
tool='fileinfo',
input='typeref_hash_nested',
args='--verbose --json'
)
def test_correctly_analyzes_typerefs_nested(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][0]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][0]['name'], 'CompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][0]['nameSpace'], 'System.Runtime.CompilerServices')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][1]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][1]['name'], 'RuntimeCompatibilityAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][1]['nameSpace'], 'System.Runtime.CompilerServices')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][2]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][2]['name'], 'DebuggableAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][2]['nameSpace'], 'System.Diagnostics')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][3]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][3]['name'], 'DebuggingModes.DebuggableAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][3]['nameSpace'], 'System.Diagnostics')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][4]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][4]['name'], 'TargetFrameworkAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][4]['nameSpace'], 'System.Runtime.Versioning')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][5]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][5]['name'], 'AssemblyCompanyAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][5]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][6]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][6]['name'], 'AssemblyConfigurationAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][6]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][7]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][7]['name'], 'AssemblyFileVersionAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][7]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][8]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][8]['name'], 'AssemblyInformationalVersionAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][8]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][9]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][9]['name'], 'AssemblyProductAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][9]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][10]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][10]['name'], 'AssemblyTitleAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][10]['nameSpace'], 'System.Reflection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][11]['libraryName'], 'System.Runtime')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][11]['name'], 'Object')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][11]['nameSpace'], 'System')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][12]['libraryName'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][12]['name'], 'CustomClass1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][12]['nameSpace'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][13]['libraryName'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][13]['name'], 'CustomClass2')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][13]['nameSpace'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][14]['libraryName'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][14]['name'], 'CustomSubClassA.CustomClass1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][14]['nameSpace'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][15]['libraryName'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][15]['name'], 'CustomSubClassB.CustomClass2')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][15]['nameSpace'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][16]['libraryName'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][16]['name'], 'CustomSubSubClassX.CustomSubClassB.CustomClass2')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][16]['nameSpace'], 'CustomLibrary')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][17]['libraryName'], 'System.Console')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][17]['name'], 'Console')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][17]['nameSpace'], 'System')
def test_correctly_computes_typeref_hash(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['crc32'], '2553516c')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['md5'], '5742603226df6e720f055413ba924c2c')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['sha256'], 'c40e879174d85aaf426217ab5e39de04214245636550b82051a759d2946f15db')
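# Editor's note: the per-index assertions above could equally be written as a
# data-driven loop over (libraryName, name, nameSpace) triples; a minimal,
# behavior-preserving sketch:
#
#     expected = [
#         ('System.Runtime', 'CompilationRelaxationsAttribute', 'System.Runtime.CompilerServices'),
#         ('System.Runtime', 'RuntimeCompatibilityAttribute', 'System.Runtime.CompilerServices'),
#         # ... remaining triples as asserted above ...
#     ]
#     types = self.fileinfo.output['dotnetInfo']['typeRefTable']['types']
#     for i, (lib, name, ns) in enumerate(expected):
#         self.assertEqual(types[i]['libraryName'], lib)
#         self.assertEqual(types[i]['name'], name)
#         self.assertEqual(types[i]['nameSpace'], ns)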
# https://github.com/avast/retdec/issues/363
# Test typeref hashes for .NET files
# Default VS binary
class TestTypeRefHashDefault(Test):
settings = TestSettings(
tool='fileinfo',
input='typeref_hash_default',
args='--verbose --json'
)
def test_correctly_computes_typeref_hash(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['crc32'], 'bb390cc9')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['md5'], '93b7f964c87a94b07d1f6171f0b7d7c1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['sha256'], '37a37a2d4cc651a9b6cf54da949cbb6c89fc1e6e9991628087741d51666c7f1b')
# VS nested class binary with cyclic referencing
class TestTypeRefHashNestedInfinite(Test):
settings = TestSettings(
tool='fileinfo',
input='typeref_hash_nested_infinite',
args='--verbose --json'
)
def test_correctly_computes_typeref_hash(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['crc32'], '0c472e09')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['md5'], 'f8a9907a02d8a1bede13aa8dfc005269')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['sha256'], '9b327187b627a940bfe8645ca3df66500906f1822cfe08e279cbf23a7dd23660')
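# Editor's sketch: the crc32/md5/sha256 values asserted in these tests are
# digests over a serialization of the TypeRef table. retdec's exact
# normalization is not reproduced here; the following only illustrates the
# general technique under an assumed serialization:
#
#     import hashlib, zlib
#     types = self.fileinfo.output['dotnetInfo']['typeRefTable']['types']
#     data = ','.join(f"{t['name']}.{t.get('nameSpace', '')}" for t in types).encode()
#     crc32 = format(zlib.crc32(data) & 0xFFFFFFFF, '08x')
#     md5 = hashlib.md5(data).hexdigest()
#     sha256 = hashlib.sha256(data).hexdigest()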
class TestTypeRefCorruptedAssemblyRefTable(Test):
settings = TestSettings(
tool='fileinfo',
input='057cc3829d8eba8aebc3043eea40af5b05e1eb229012136b43134728e1b46d63',
args='--verbose --json'
)
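# The input above appears to be a sample with a corrupted AssemblyRef/string
# heap: the truncated names asserted below ('indowsApp1', 'wsApp1', ...) pin
# down fileinfo's tolerant parsing of damaged metadata rather than meaningful
# type names.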
def test_correctly_parses_typeref_info(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['crc32'], '57e1972c')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['md5'], '84321e6febcdb37d337633ad56f28025')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['sha256'], 'f226ba802a629e8edfdaa08e199e11ead5cd0b0b7d461b725678a11a71dec809')
def test_correctly_parses_typerefs(self):
assert self.fileinfo.succeeded
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][0]['name'], 'indowsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][1]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][2]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][2]['nameSpace'], 'Void')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][3]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][3]['nameSpace'], 'Int32')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][4]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][4]['nameSpace'], 'Boolean')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][5]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][5]['nameSpace'], 'RuntimeCompatibilityAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][6]['name'], 'axationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][6]['nameSpace'], 'DebuggingModes')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][7]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][8]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][8]['nameSpace'], 'String')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][9]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][9]['nameSpace'], 'AssemblyDescriptionAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][10]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][10]['nameSpace'], 'AssemblyCompanyAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][11]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][11]['nameSpace'], 'AssemblyProductAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][12]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][12]['nameSpace'], 'AssemblyTrademarkAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][13]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][13]['nameSpace'], 'lyDescriptionAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][14]['name'], 'wsApp1.wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][14]['nameSpace'], 'MD5CryptoServiceProvider')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][15]['name'], 'wsApp1.wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][15]['nameSpace'], 'MD5CryptoServiceProvider')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][16]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][16]['nameSpace'], 'SuppressIldasmAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][17]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][17]['nameSpace'], 'AssemblyDelaySignAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][18]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][18]['nameSpace'], 'AssemblyConfigurationAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][19]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][19]['nameSpace'], 'aySignAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][20]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][20]['nameSpace'], 'WindowsFormsApplicationBase')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][21]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][21]['nameSpace'], 'Computer')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][22]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][22]['nameSpace'], 'ApplicationSettingsBase')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][23]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][23]['nameSpace'], 'Form')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][24]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][24]['nameSpace'], 'ValueType')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][25]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][25]['nameSpace'], 'Enum')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][26]['name'], 'onRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][26]['nameSpace'], 'Int32')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][27]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][27]['nameSpace'], 'Application')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][28]['name'], 'onRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][28]['nameSpace'], 'Boolean')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][29]['name'], 'onRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][29]['nameSpace'], 'Object')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][30]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][30]['nameSpace'], 'AuthenticationMode')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][31]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][31]['nameSpace'], 'ShutdownMode')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][32]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][32]['nameSpace'], 'User')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][33]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][33]['nameSpace'], 'Hashtable')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][34]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][34]['nameSpace'], 'Control')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][35]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][35]['nameSpace'], 'Type')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][36]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][36]['nameSpace'], 'RuntimeTypeHandle')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][37]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][37]['nameSpace'], 'Utils')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][38]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][38]['nameSpace'], 'InvalidOperationException')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][39]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][39]['nameSpace'], 'Activator')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][40]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][40]['nameSpace'], 'ProjectData')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][41]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][41]['nameSpace'], 'Exception')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][42]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][42]['nameSpace'], 'Component')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][43]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][43]['nameSpace'], 'RuntimeHelpers')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][44]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][44]['nameSpace'], 'ResourceManager')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][45]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][45]['nameSpace'], 'CultureInfo')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][46]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][46]['nameSpace'], 'EventArgs')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][47]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][47]['nameSpace'], 'ShutdownEventHandler')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][48]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][48]['nameSpace'], 'IntPtr')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][49]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][49]['nameSpace'], 'SettingsBase')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][50]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][50]['nameSpace'], 'ObjectFlowControl')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][51]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][51]['nameSpace'], 'Monitor')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][52]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][52]['nameSpace'], 'IContainer')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][53]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][53]['nameSpace'], 'EventHandler')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][54]['name'], 'laxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][54]['nameSpace'], 'Size')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][55]['name'], 'laxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][55]['nameSpace'], 'Point')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][56]['name'], 'laxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][56]['nameSpace'], 'SizeF')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][57]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][57]['nameSpace'], 'Single')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][58]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][58]['nameSpace'], 'Double')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][59]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][59]['nameSpace'], 'Strings')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][60]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][60]['nameSpace'], 'AppDomain')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][61]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][61]['nameSpace'], 'Convert')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][62]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][62]['nameSpace'], 'Byte')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][63]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][63]['nameSpace'], 'NewLateBinding')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][64]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][64]['nameSpace'], 'Conversions')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][65]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][65]['nameSpace'], 'IDisposable')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][66]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][66]['nameSpace'], 'ButtonBase')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][67]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][67]['nameSpace'], 'ContainerControl')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][68]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][68]['nameSpace'], 'AutoScaleMode')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][69]['name'], 'bute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][69]['nameSpace'], 'ControlCollection')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][70]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][70]['nameSpace'], 'FieldInfo')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][71]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][71]['nameSpace'], 'MethodInfo')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][72]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][72]['nameSpace'], 'Module')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][73]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][73]['nameSpace'], 'MemberInfo')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][74]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][74]['nameSpace'], 'MethodBase')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][75]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][75]['nameSpace'], 'Delegate')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][76]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][76]['nameSpace'], 'IAsyncResult')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][77]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][77]['nameSpace'], 'AsyncCallback')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][78]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][78]['nameSpace'], 'Int64')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][79]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][79]['nameSpace'], 'UInt32')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][80]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][80]['nameSpace'], 'Array')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][81]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][81]['nameSpace'], 'RuntimeFieldHandle')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][82]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][82]['nameSpace'], 'SortedList')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][83]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][83]['nameSpace'], 'UInt64')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][84]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][84]['nameSpace'], 'BitConverter')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][85]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][85]['nameSpace'], 'UInt16')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][86]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][86]['nameSpace'], 'SymmetricAlgorithm')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][87]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][87]['nameSpace'], 'ObjectHandle')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][88]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][88]['nameSpace'], 'RijndaelManaged')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][89]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][89]['nameSpace'], 'MD5CryptoServiceProvider')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][90]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][90]['nameSpace'], 'HashAlgorithm')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][91]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][91]['nameSpace'], 'MemoryStream')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][92]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][92]['nameSpace'], 'ICryptoTransform')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][93]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][93]['nameSpace'], 'BinaryReader')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][94]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][94]['nameSpace'], 'Stream')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][95]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][95]['nameSpace'], 'Encoding')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][96]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][96]['nameSpace'], 'Marshal')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][97]['name'], 'wsApp1.wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][97]['nameSpace'], 'MD5CryptoServiceProvider')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][98]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][98]['nameSpace'], 'PropertyInfo')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][99]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][99]['nameSpace'], 'FileStream')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][100]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][100]['nameSpace'], 'DialogResult')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][101]['name'], 'lationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][101]['nameSpace'], 'MessageBox')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][102]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][102]['nameSpace'], 'GeneratedCodeAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][103]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][103]['nameSpace'], 'DebuggerHiddenAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][104]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][104]['nameSpace'], 'DebuggerStepThroughAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][105]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][105]['nameSpace'], 'StandardModuleAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][106]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][106]['nameSpace'], 'HideModuleNameAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][107]['name'], 'ompilationRelaxationsAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][107]['nameSpace'], 'HelpKeywordAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][108]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][108]['nameSpace'], 'DebuggerNonUserCodeAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][109]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][109]['nameSpace'], 'CompilerGeneratedAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][110]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][110]['nameSpace'], 'DesignerGeneratedAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][111]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][111]['nameSpace'], 'DebuggerBrowsableAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][112]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][112]['nameSpace'], 'DebuggerBrowsableState')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][113]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][113]['nameSpace'], 'AccessedThroughPropertyAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][114]['name'], 'p1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][114]['nameSpace'], 'MyGroupCollectionAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][115]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][115]['nameSpace'], 'ThreadStaticAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][116]['name'], 'ttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][117]['name'], 'wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][117]['nameSpace'], 'UnmanagedFunctionPointerAttribute')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][118]['name'], 'wsApp1.wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][118]['nameSpace'], 'MD5CryptoServiceProvider')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][119]['name'], 'wsApp1.wsApp1')
self.assertEqual(self.fileinfo.output['dotnetInfo']['typeRefTable']['types'][119]['nameSpace'], 'MD5CryptoServiceProvider')
| 102.637119
| 154
| 0.695806
| 3,482
| 37,052
| 7.395175
| 0.09506
| 0.143534
| 0.222835
| 0.31666
| 0.869359
| 0.869359
| 0.865359
| 0.865359
| 0.842524
| 0.570757
| 0
| 0.030837
| 0.089766
| 37,052
| 360
| 155
| 102.922222
| 0.732669
| 0.003832
| 0
| 0.061947
| 0
| 0
| 0.392098
| 0.059966
| 0
| 0
| 0
| 0
| 0.908555
| 1
| 0.017699
| false
| 0
| 0.00295
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5395b606474e7e00966e90db678f7573a15a1849
| 61,540
|
py
|
Python
|
tb_rest_client/api/api_pe/converter_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 30
|
2020-06-19T06:42:50.000Z
|
2021-08-23T21:16:36.000Z
|
tb_rest_client/api/api_pe/converter_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 25
|
2021-08-30T01:17:27.000Z
|
2022-03-16T14:10:14.000Z
|
tb_rest_client/api/api_pe/converter_controller_api.py
|
samson0v/python_tb_rest_client
|
08ff7898740f7cec2170e85d5c3c89e222e967f7
|
[
"Apache-2.0"
] | 23
|
2020-07-06T13:41:54.000Z
|
2021-08-23T21:04:50.000Z
|
# coding: utf-8
"""
ThingsBoard REST API
ThingsBoard Professional Edition IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3PAAS-RC1
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class ConverterControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
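# Editor's sketch: wiring up the wrapper. With no argument a default ApiClient
# is created; swagger-codegen clients typically let the caller pass a
# configured ApiClient instead (configuration handling is assumed, not shown):
#
#     from tb_rest_client.api_client import ApiClient
#     client = ApiClient()
#     converters = ConverterControllerApi(api_client=client)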
def delete_converter_using_delete(self, converter_id, **kwargs): # noqa: E501
"""Delete converter (deleteConverter) # noqa: E501
Deletes the converter and all of its relations (from and to the converter). Referencing a non-existing converter Id will cause an error. If the converter is associated with an integration, it cannot be deleted. A security check is performed to verify that the user has 'DELETE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_converter_using_delete(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_converter_using_delete_with_http_info(converter_id, **kwargs) # noqa: E501
else:
(data) = self.delete_converter_using_delete_with_http_info(converter_id, **kwargs) # noqa: E501
return data
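# Editor's sketch of the sync/async convention documented above (the converter
# id is the docstring's example value):
#
#     api.delete_converter_using_delete('784f394c-42b6-435a-983c-b7beff2784f9')
#
#     thread = api.delete_converter_using_delete(
#         '784f394c-42b6-435a-983c-b7beff2784f9', async_req=True)
#     thread.get()  # blocks until the DELETE completes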
def delete_converter_using_delete_with_http_info(self, converter_id, **kwargs): # noqa: E501
"""Delete converter (deleteConverter) # noqa: E501
Deletes the converter and all of its relations (from and to the converter). Referencing a non-existing converter Id will cause an error. If the converter is associated with an integration, it cannot be deleted. A security check is performed to verify that the user has 'DELETE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_converter_using_delete_with_http_info(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['converter_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_converter_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'converter_id' is set
if ('converter_id' not in params or
params['converter_id'] is None):
raise ValueError("Missing the required parameter `converter_id` when calling `delete_converter_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'converter_id' in params:
path_params['converterId'] = params['converter_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter/{converterId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
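    # Usage sketch (illustrative, not part of the generated client): deleting
    # a converter by id, using the placeholder UUID from the docstring above.
    #
    #     api = ConverterControllerApi()
    #     api.delete_converter_using_delete('784f394c-42b6-435a-983c-b7beff2784f9')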
def get_converter_by_id_using_get(self, converter_id, **kwargs): # noqa: E501
"""Get Converter (getConverterById) # noqa: E501
Fetch the Converter object based on the provided Converter Id. The server checks that the converter is owned by the same tenant. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converter_by_id_using_get(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: Converter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_converter_by_id_using_get_with_http_info(converter_id, **kwargs) # noqa: E501
else:
(data) = self.get_converter_by_id_using_get_with_http_info(converter_id, **kwargs) # noqa: E501
return data
def get_converter_by_id_using_get_with_http_info(self, converter_id, **kwargs): # noqa: E501
"""Get Converter (getConverterById) # noqa: E501
Fetch the Converter object based on the provided Converter Id. The server checks that the converter is owned by the same tenant. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converter_by_id_using_get_with_http_info(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: Converter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['converter_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_converter_by_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'converter_id' is set
if ('converter_id' not in params or
params['converter_id'] is None):
raise ValueError("Missing the required parameter `converter_id` when calling `get_converter_by_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'converter_id' in params:
path_params['converterId'] = params['converter_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter/{converterId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Converter', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
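    # Usage sketch (illustrative, not part of the generated client): fetching
    # a single converter; the result deserializes into the generated
    # Converter model.
    #
    #     converter = api.get_converter_by_id_using_get(converter_id)
    #     print(converter.name)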
def get_converters_by_ids_using_get(self, converter_ids, **kwargs): # noqa: E501
"""Get Converters By Ids (getConvertersByIds) # noqa: E501
Requested converters must be owned by the tenant that is performing the request. A security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converters_by_ids_using_get(converter_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_ids: A list of converter ids, separated by comma ',' (required)
:return: list[Converter]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_converters_by_ids_using_get_with_http_info(converter_ids, **kwargs) # noqa: E501
else:
(data) = self.get_converters_by_ids_using_get_with_http_info(converter_ids, **kwargs) # noqa: E501
return data
def get_converters_by_ids_using_get_with_http_info(self, converter_ids, **kwargs): # noqa: E501
"""Get Converters By Ids (getConvertersByIds) # noqa: E501
Requested converters must be owned by the tenant that is performing the request. A security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converters_by_ids_using_get_with_http_info(converter_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_ids: A list of converter ids, separated by comma ',' (required)
:return: list[Converter]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['converter_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_converters_by_ids_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'converter_ids' is set
if ('converter_ids' not in params or
params['converter_ids'] is None):
raise ValueError("Missing the required parameter `converter_ids` when calling `get_converters_by_ids_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'converter_ids' in params:
query_params.append(('converterIds', params['converter_ids'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converters{?converterIds}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Converter]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
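    # Usage sketch (illustrative, not part of the generated client): the ids
    # are passed as a single comma-separated string, as the docstring above
    # describes, not as a Python list.
    #
    #     ids = ','.join([first_converter_id, second_converter_id])
    #     converters = api.get_converters_by_ids_using_get(ids)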
def get_converters_using_get(self, page_size, page, **kwargs): # noqa: E501
"""Get Converters (getConverters) # noqa: E501
Returns a page of converters owned by the tenant. You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. A security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converters_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum number of entities in one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the converter name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataConverter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_converters_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_converters_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_converters_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""Get Converters (getConverters) # noqa: E501
Returns a page of converters owned by the tenant. You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. A security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_converters_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum number of entities in one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the converter name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataConverter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_converters_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_converters_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_converters_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converters{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataConverter', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
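    # Usage sketch (illustrative, not part of the generated client): paging
    # through every converter. `has_next` is assumed to be the attribute the
    # generated PageDataConverter model exposes for the REST `hasNext` flag.
    #
    #     page = 0
    #     while True:
    #         page_data = api.get_converters_using_get(page_size=100, page=page)
    #         for converter in page_data.data:
    #             print(converter.id, converter.name)
    #         if not page_data.has_next:
    #             break
    #         page += 1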
def get_latest_converter_debug_input_using_get(self, converter_id, **kwargs): # noqa: E501
"""Get latest debug input event (getLatestConverterDebugInput) # noqa: E501
Returns a JSON object of the latest debug event representing the input message the converter processed. ## Uplink Converter Debug Input Event Example ```json { \"inContentType\":\"JSON\", \"inContent\":\"{\\\"temp\\\":40}\", \"inMetadata\":\"{\\\"Header:sec-ch-ua\\\":\\\"\\\\\\\"Chromium\\\\\\\";v=\\\\\\\"94\\\\\\\", \\\\\\\"Google Chrome\\\\\\\";v=\\\\\\\"94\\\\\\\", \\\\\\\";Not A Brand\\\\\\\";v=\\\\\\\"99\\\\\\\"\\\",\\\"Header:user-agent\\\":\\\"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36\\\",\\\"integrationName\\\":\\\"Integration\\\",\\\"Header:cookie\\\":\\\"GUID=zYSs8hymSwZKv8kHALKY; redirect_to=%2F; JSESSIONID=B0A7C8E481409CE7924E738DB04F62F9\\\",\\\"Header:sec-ch-ua-platform\\\":\\\"\\\\\\\"Linux\\\\\\\"\\\",\\\"Header:accept\\\":\\\"*/*\\\",\\\"Header:origin\\\":\\\"http://localhost:8080\\\",\\\"Header:sec-fetch-site\\\":\\\"same-origin\\\",\\\"Header:connection\\\":\\\"keep-alive\\\",\\\"Header:accept-encoding\\\":\\\"gzip, deflate, br\\\",\\\"Header:content-type\\\":\\\"application/json\\\",\\\"Header:content-length\\\":\\\"16\\\",\\\"Header:sec-fetch-mode\\\":\\\"cors\\\",\\\"Header:sec-ch-ua-mobile\\\":\\\"?0\\\",\\\"Header:sec-fetch-dest\\\":\\\"empty\\\",\\\"Header:host\\\":\\\"localhost:8080\\\",\\\"Header:referer\\\":\\\"http://localhost:8080/swagger-ui.html\\\",\\\"Header:accept-language\\\":\\\"en-US,en;q=0.9,ru-RU;q=0.8,ru;q=0.7,uk;q=0.6,und;q=0.5\\\"}\" } ``` * 'inContentType' - content type of the message received by the integration; * 'inContent' - message data received; * 'inMetadata' - integration metadata (e.g. headers). ## Downlink Converter Debug Input Event Example ```json { \"inContentType\":\"JSON\", \"inContent\":\"{\\\"temp\\\":42,\\\"humidity\\\":77}\", \"inMsgType\":\"POST_TELEMETRY_REQUEST\", \"inMetadata\":\"{\\\"data\\\":\\\"40\\\"}\", \"inIntegrationMetadata\":\"{\\\"integrationName\\\":\\\"Integration\\\"}\" } ``` * 'inContentType' - content type of the message received by the integration; * 'inContent' - content of the message pushed from the rule engine; * 'inMsgType' - type of the message pushed from the rule engine; * 'inMetadata' - content of the message metadata pushed from the rule engine; * 'inIntegrationMetadata' - integration metadata. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_latest_converter_debug_input_using_get(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_latest_converter_debug_input_using_get_with_http_info(converter_id, **kwargs) # noqa: E501
else:
(data) = self.get_latest_converter_debug_input_using_get_with_http_info(converter_id, **kwargs) # noqa: E501
return data
def get_latest_converter_debug_input_using_get_with_http_info(self, converter_id, **kwargs): # noqa: E501
"""Get latest debug input event (getLatestConverterDebugInput) # noqa: E501
Returns a JSON object of the latest debug event representing the input message the converter processed. ## Uplink Converter Debug Input Event Example ```json { \"inContentType\":\"JSON\", \"inContent\":\"{\\\"temp\\\":40}\", \"inMetadata\":\"{\\\"Header:sec-ch-ua\\\":\\\"\\\\\\\"Chromium\\\\\\\";v=\\\\\\\"94\\\\\\\", \\\\\\\"Google Chrome\\\\\\\";v=\\\\\\\"94\\\\\\\", \\\\\\\";Not A Brand\\\\\\\";v=\\\\\\\"99\\\\\\\"\\\",\\\"Header:user-agent\\\":\\\"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36\\\",\\\"integrationName\\\":\\\"Integration\\\",\\\"Header:cookie\\\":\\\"GUID=zYSs8hymSwZKv8kHALKY; redirect_to=%2F; JSESSIONID=B0A7C8E481409CE7924E738DB04F62F9\\\",\\\"Header:sec-ch-ua-platform\\\":\\\"\\\\\\\"Linux\\\\\\\"\\\",\\\"Header:accept\\\":\\\"*/*\\\",\\\"Header:origin\\\":\\\"http://localhost:8080\\\",\\\"Header:sec-fetch-site\\\":\\\"same-origin\\\",\\\"Header:connection\\\":\\\"keep-alive\\\",\\\"Header:accept-encoding\\\":\\\"gzip, deflate, br\\\",\\\"Header:content-type\\\":\\\"application/json\\\",\\\"Header:content-length\\\":\\\"16\\\",\\\"Header:sec-fetch-mode\\\":\\\"cors\\\",\\\"Header:sec-ch-ua-mobile\\\":\\\"?0\\\",\\\"Header:sec-fetch-dest\\\":\\\"empty\\\",\\\"Header:host\\\":\\\"localhost:8080\\\",\\\"Header:referer\\\":\\\"http://localhost:8080/swagger-ui.html\\\",\\\"Header:accept-language\\\":\\\"en-US,en;q=0.9,ru-RU;q=0.8,ru;q=0.7,uk;q=0.6,und;q=0.5\\\"}\" } ``` * 'inContentType' - content type of the message received by the integration; * 'inContent' - message data received; * 'inMetadata' - integration metadata (e.g. headers). ## Downlink Converter Debug Input Event Example ```json { \"inContentType\":\"JSON\", \"inContent\":\"{\\\"temp\\\":42,\\\"humidity\\\":77}\", \"inMsgType\":\"POST_TELEMETRY_REQUEST\", \"inMetadata\":\"{\\\"data\\\":\\\"40\\\"}\", \"inIntegrationMetadata\":\"{\\\"integrationName\\\":\\\"Integration\\\"}\" } ``` * 'inContentType' - content type of the message received by the integration; * 'inContent' - content of the message pushed from the rule engine; * 'inMsgType' - type of the message pushed from the rule engine; * 'inMetadata' - content of the message metadata pushed from the rule engine; * 'inIntegrationMetadata' - integration metadata. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_latest_converter_debug_input_using_get_with_http_info(converter_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str converter_id: A string value representing the converter id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['converter_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_latest_converter_debug_input_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'converter_id' is set
if ('converter_id' not in params or
params['converter_id'] is None):
raise ValueError("Missing the required parameter `converter_id` when calling `get_latest_converter_debug_input_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'converter_id' in params:
path_params['converterId'] = params['converter_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter/{converterId}/debugIn', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JsonNode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
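    # Usage sketch (illustrative, not part of the generated client): pulling
    # the latest debug input event; the JsonNode result deserializes to a
    # plain dict with the fields shown in the docstring above
    # ('inContentType', 'inContent', 'inMetadata', ...).
    #
    #     event = api.get_latest_converter_debug_input_using_get(converter_id)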
def save_converter_using_post(self, **kwargs): # noqa: E501
"""Create Or Update Converter (saveConverter) # noqa: E501
Create or update the Converter. When creating converter, platform generates Converter Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created converter id will be present in the response. Specify existing Converter id to update the converter. Referencing non-existing converter Id will cause 'Not Found' error. Converter name is unique in the scope of tenant. # Converter Configuration Converter configuration (**'configuration'** field) is the JSON object that should contain one of two possible fields: **'decoder'** or **'encoder'**. The former is used when the converter has UPLINK type, the latter is used - when DOWNLINK type. It can contain both 'decoder' and 'encoder' fields, when the correct one is specified for the appropriate converter type, another one can be set to 'null'. See the examples of each one below. ## Uplink Converter Configuration ```json { \"decoder\":\"// Decode an uplink message from a buffer\\n// payload - array of bytes\\n// metadata - key/value object\\n\\n/** Decoder **/\\n\\n// decode payload to string\\nvar payloadStr = decodeToString(payload);\\n\\n// decode payload to JSON\\n// var data = decodeToJson(payload);\\n\\nvar deviceName = 'Device A';\\nvar deviceType = 'thermostat';\\nvar customerName = 'customer';\\nvar groupName = 'thermostat devices';\\n// use assetName and assetType instead of deviceName and deviceType\\n// to automatically create assets instead of devices.\\n// var assetName = 'Asset A';\\n// var assetType = 'building';\\n\\n// Result object with device/asset attributes/telemetry data\\nvar result = {\\n// Use deviceName and deviceType or assetName and assetType, but not both.\\n deviceName: deviceName,\\n deviceType: deviceType,\\n// assetName: assetName,\\n// assetType: assetType,\\n customerName: customerName,\\n groupName: groupName,\\n attributes: {\\n model: 'Model A',\\n serialNumber: 'SN111',\\n integrationName: metadata['integrationName']\\n },\\n telemetry: {\\n temperature: 42,\\n humidity: 80,\\n rawData: payloadStr\\n }\\n};\\n\\n/** Helper functions **/\\n\\nfunction decodeToString(payload) {\\n return String.fromCharCode.apply(String, payload);\\n}\\n\\nfunction decodeToJson(payload) {\\n // covert payload to string.\\n var str = decodeToString(payload);\\n\\n // parse string to JSON\\n var data = JSON.parse(str);\\n return data;\\n}\\n\\nreturn result;\", \"encoder\":null } ``` Decoder field in the more readable form: ```text // Decode an uplink message from a buffer // payload - array of bytes // metadata - key/value object /** Decoder **/ // decode payload to string var payloadStr = decodeToString(payload); // decode payload to JSON // var data = decodeToJson(payload); var deviceName = 'Device A'; var deviceType = 'thermostat'; var customerName = 'customer'; var groupName = 'thermostat devices'; // use assetName and assetType instead of deviceName and deviceType // to automatically create assets instead of devices. // var assetName = 'Asset A'; // var assetType = 'building'; // Result object with device/asset attributes/telemetry data var result = { // Use deviceName and deviceType or assetName and assetType, but not both. 
deviceName: deviceName, deviceType: deviceType, // assetName: assetName, // assetType: assetType, customerName: customerName, groupName: groupName, attributes: { model: 'Model A', serialNumber: 'SN111', integrationName: metadata['integrationName'] }, telemetry: { temperature: 42, humidity: 80, rawData: payloadStr } }; /** Helper functions **/ function decodeToString(payload) { return String.fromCharCode.apply(String, payload); } function decodeToJson(payload) { // covert payload to string. var str = decodeToString(payload); // parse string to JSON var data = JSON.parse(str); return data; } return result; ``` ## Downlink Converter Configuration ```json { \"decoder\":null, \"encoder\":\"// Encode downlink data from incoming Rule Engine message\\n\\n// msg - JSON message payload downlink message json\\n// msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc.\\n// metadata - list of key-value pairs with additional data about the message\\n// integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter\\n\\n/** Encoder **/\\n\\nvar data = {};\\n\\n// Process data from incoming message and metadata\\n\\ndata.tempFreq = msg.temperatureUploadFrequency;\\ndata.humFreq = msg.humidityUploadFrequency;\\n\\ndata.devSerialNumber = metadata['ss_serialNumber'];\\n\\n// Result object with encoded downlink payload\\nvar result = {\\n\\n // downlink data content type: JSON, TEXT or BINARY (base64 format)\\n contentType: \\\"JSON\\\",\\n\\n // downlink data\\n data: JSON.stringify(data),\\n\\n // Optional metadata object presented in key/value format\\n metadata: {\\n topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload'\\n }\\n\\n};\\n\\nreturn result;\" } ``` Encoder field in the more readable form: ```text // Encode downlink data from incoming Rule Engine message // msg - JSON message payload downlink message json // msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc. // metadata - list of key-value pairs with additional data about the message // integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter /** Encoder **/ var data = {}; // Process data from incoming message and metadata data.tempFreq = msg.temperatureUploadFrequency; data.humFreq = msg.humidityUploadFrequency; data.devSerialNumber = metadata['ss_serialNumber']; // Result object with encoded downlink payload var result = { // downlink data content type: JSON, TEXT or BINARY (base64 format) contentType: \"JSON\", // downlink data data: JSON.stringify(data), // Optional metadata object presented in key/value format metadata: { topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload' } }; return result; ``` Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_converter_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Converter body:
:return: Converter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_converter_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.save_converter_using_post_with_http_info(**kwargs) # noqa: E501
return data
def save_converter_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Create Or Update Converter (saveConverter) # noqa: E501
Create or update the Converter. When creating converter, platform generates Converter Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created converter id will be present in the response. Specify existing Converter id to update the converter. Referencing non-existing converter Id will cause 'Not Found' error. Converter name is unique in the scope of tenant. # Converter Configuration Converter configuration (**'configuration'** field) is the JSON object that should contain one of two possible fields: **'decoder'** or **'encoder'**. The former is used when the converter has UPLINK type, the latter is used - when DOWNLINK type. It can contain both 'decoder' and 'encoder' fields, when the correct one is specified for the appropriate converter type, another one can be set to 'null'. See the examples of each one below. ## Uplink Converter Configuration ```json { \"decoder\":\"// Decode an uplink message from a buffer\\n// payload - array of bytes\\n// metadata - key/value object\\n\\n/** Decoder **/\\n\\n// decode payload to string\\nvar payloadStr = decodeToString(payload);\\n\\n// decode payload to JSON\\n// var data = decodeToJson(payload);\\n\\nvar deviceName = 'Device A';\\nvar deviceType = 'thermostat';\\nvar customerName = 'customer';\\nvar groupName = 'thermostat devices';\\n// use assetName and assetType instead of deviceName and deviceType\\n// to automatically create assets instead of devices.\\n// var assetName = 'Asset A';\\n// var assetType = 'building';\\n\\n// Result object with device/asset attributes/telemetry data\\nvar result = {\\n// Use deviceName and deviceType or assetName and assetType, but not both.\\n deviceName: deviceName,\\n deviceType: deviceType,\\n// assetName: assetName,\\n// assetType: assetType,\\n customerName: customerName,\\n groupName: groupName,\\n attributes: {\\n model: 'Model A',\\n serialNumber: 'SN111',\\n integrationName: metadata['integrationName']\\n },\\n telemetry: {\\n temperature: 42,\\n humidity: 80,\\n rawData: payloadStr\\n }\\n};\\n\\n/** Helper functions **/\\n\\nfunction decodeToString(payload) {\\n return String.fromCharCode.apply(String, payload);\\n}\\n\\nfunction decodeToJson(payload) {\\n // covert payload to string.\\n var str = decodeToString(payload);\\n\\n // parse string to JSON\\n var data = JSON.parse(str);\\n return data;\\n}\\n\\nreturn result;\", \"encoder\":null } ``` Decoder field in the more readable form: ```text // Decode an uplink message from a buffer // payload - array of bytes // metadata - key/value object /** Decoder **/ // decode payload to string var payloadStr = decodeToString(payload); // decode payload to JSON // var data = decodeToJson(payload); var deviceName = 'Device A'; var deviceType = 'thermostat'; var customerName = 'customer'; var groupName = 'thermostat devices'; // use assetName and assetType instead of deviceName and deviceType // to automatically create assets instead of devices. // var assetName = 'Asset A'; // var assetType = 'building'; // Result object with device/asset attributes/telemetry data var result = { // Use deviceName and deviceType or assetName and assetType, but not both. 
deviceName: deviceName, deviceType: deviceType, // assetName: assetName, // assetType: assetType, customerName: customerName, groupName: groupName, attributes: { model: 'Model A', serialNumber: 'SN111', integrationName: metadata['integrationName'] }, telemetry: { temperature: 42, humidity: 80, rawData: payloadStr } }; /** Helper functions **/ function decodeToString(payload) { return String.fromCharCode.apply(String, payload); } function decodeToJson(payload) { // covert payload to string. var str = decodeToString(payload); // parse string to JSON var data = JSON.parse(str); return data; } return result; ``` ## Downlink Converter Configuration ```json { \"decoder\":null, \"encoder\":\"// Encode downlink data from incoming Rule Engine message\\n\\n// msg - JSON message payload downlink message json\\n// msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc.\\n// metadata - list of key-value pairs with additional data about the message\\n// integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter\\n\\n/** Encoder **/\\n\\nvar data = {};\\n\\n// Process data from incoming message and metadata\\n\\ndata.tempFreq = msg.temperatureUploadFrequency;\\ndata.humFreq = msg.humidityUploadFrequency;\\n\\ndata.devSerialNumber = metadata['ss_serialNumber'];\\n\\n// Result object with encoded downlink payload\\nvar result = {\\n\\n // downlink data content type: JSON, TEXT or BINARY (base64 format)\\n contentType: \\\"JSON\\\",\\n\\n // downlink data\\n data: JSON.stringify(data),\\n\\n // Optional metadata object presented in key/value format\\n metadata: {\\n topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload'\\n }\\n\\n};\\n\\nreturn result;\" } ``` Encoder field in the more readable form: ```text // Encode downlink data from incoming Rule Engine message // msg - JSON message payload downlink message json // msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc. // metadata - list of key-value pairs with additional data about the message // integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter /** Encoder **/ var data = {}; // Process data from incoming message and metadata data.tempFreq = msg.temperatureUploadFrequency; data.humFreq = msg.humidityUploadFrequency; data.devSerialNumber = metadata['ss_serialNumber']; // Result object with encoded downlink payload var result = { // downlink data content type: JSON, TEXT or BINARY (base64 format) contentType: \"JSON\", // downlink data data: JSON.stringify(data), // Optional metadata object presented in key/value format metadata: { topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload' } }; return result; ``` Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_converter_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Converter body:
:return: Converter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method save_converter_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Converter', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
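    # Usage sketch (illustrative, not part of the generated client): creating
    # an uplink converter. The import path and keyword arguments are
    # assumptions about the generated Converter model, mirroring the
    # 'configuration' JSON documented above.
    #
    #     from tb_rest_client.models.models_pe import Converter
    #     converter = Converter(name='My uplink converter', type='UPLINK',
    #                           configuration={'decoder': decoder_js,
    #                                          'encoder': None})
    #     saved = api.save_converter_using_post(body=converter)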
def test_down_link_converter_using_post(self, **kwargs): # noqa: E501
"""Test converter function (testDownLinkConverter) # noqa: E501
Returns a JSON object representing the result of the processed incoming message. ## Request Body Example ```json { \"metadata\":{ \"data\":\"40\" }, \"msg\":\"{\\n \\\"temp\\\": 42,\\n \\\"humidity\\\": 77\\n}\", \"msgType\":\"POST_TELEMETRY_REQUEST\", \"integrationMetadata\":{ \"integrationName\":\"Integration\" }, \"encoder\":\"// Encode downlink data from incoming Rule Engine message\\n\\n// msg - JSON message payload downlink message json\\n// msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc.\\n// metadata - list of key-value pairs with additional data about the message\\n// integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter\\n\\n/** Encoder **/\\n\\nvar data = {};\\n\\n// Process data from incoming message and metadata\\n\\ndata.tempValue = msg.temp;\\ndata.humValue = msg.humidity;\\n\\ndata.devSerialNumber = metadata['ss_serialNumber'];\\n\\n// Result object with encoded downlink payload\\nvar result = {\\n\\n // downlink data content type: JSON, TEXT or BINARY (base64 format)\\n contentType: \\\"JSON\\\",\\n\\n // downlink data\\n data: JSON.stringify(data),\\n\\n // Optional metadata object presented in key/value format\\n metadata: {\\n topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload'\\n }\\n\\n};\\n\\nreturn result;\" } ``` * 'metadata' - message metadata pushed from the rule engine; * 'msg' - message data pushed from the rule engine; * 'msgType' - type of the message pushed from the rule engine; * 'integrationMetadata' - integration metadata object; * 'encoder' - string representation of the encoder configuration. ## Response Body Example ```json { \"contentType\":\"JSON\", \"data\":\"{\\\"tempValue\\\":42,\\\"humValue\\\":77}\", \"metadata\":{ \"topic\":\"sensor/Temp Sensor/upload\" } } ``` * 'contentType' - downlink data content type; * 'data' - downlink data; * 'metadata' - optional metadata object. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_down_link_converter_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param JsonNode body:
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.test_down_link_converter_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_down_link_converter_using_post_with_http_info(**kwargs) # noqa: E501
return data
def test_down_link_converter_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Test converter function (testDownLinkConverter) # noqa: E501
Returns a JSON object representing the result of the processed incoming message. ## Request Body Example ```json { \"metadata\":{ \"data\":\"40\" }, \"msg\":\"{\\n \\\"temp\\\": 42,\\n \\\"humidity\\\": 77\\n}\", \"msgType\":\"POST_TELEMETRY_REQUEST\", \"integrationMetadata\":{ \"integrationName\":\"Integration\" }, \"encoder\":\"// Encode downlink data from incoming Rule Engine message\\n\\n// msg - JSON message payload downlink message json\\n// msgType - type of message, for ex. 'ATTRIBUTES_UPDATED', 'POST_TELEMETRY_REQUEST', etc.\\n// metadata - list of key-value pairs with additional data about the message\\n// integrationMetadata - list of key-value pairs with additional data defined in Integration executing this converter\\n\\n/** Encoder **/\\n\\nvar data = {};\\n\\n// Process data from incoming message and metadata\\n\\ndata.tempValue = msg.temp;\\ndata.humValue = msg.humidity;\\n\\ndata.devSerialNumber = metadata['ss_serialNumber'];\\n\\n// Result object with encoded downlink payload\\nvar result = {\\n\\n // downlink data content type: JSON, TEXT or BINARY (base64 format)\\n contentType: \\\"JSON\\\",\\n\\n // downlink data\\n data: JSON.stringify(data),\\n\\n // Optional metadata object presented in key/value format\\n metadata: {\\n topic: metadata['deviceType']+'/'+metadata['deviceName']+'/upload'\\n }\\n\\n};\\n\\nreturn result;\" } ``` * 'metadata' - message metadata pushed from the rule engine; * 'msg' - message data pushed from the rule engine; * 'msgType' - type of the message pushed from the rule engine; * 'integrationMetadata' - integration metadata object; * 'encoder' - string representation of the encoder configuration. ## Response Body Example ```json { \"contentType\":\"JSON\", \"data\":\"{\\\"tempValue\\\":42,\\\"humValue\\\":77}\", \"metadata\":{ \"topic\":\"sensor/Temp Sensor/upload\" } } ``` * 'contentType' - downlink data content type; * 'data' - downlink data; * 'metadata' - optional metadata object. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_down_link_converter_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param JsonNode body:
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_down_link_converter_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter/testDownLink', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JsonNode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
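    # Usage sketch (illustrative, not part of the generated client): testing
    # a downlink encoder without saving a converter; the dict mirrors the
    # Request Body Example in the docstring above.
    #
    #     body = {
    #         'metadata': {'data': '40'},
    #         'msg': '{"temp": 42, "humidity": 77}',
    #         'msgType': 'POST_TELEMETRY_REQUEST',
    #         'integrationMetadata': {'integrationName': 'Integration'},
    #         'encoder': encoder_js,
    #     }
    #     result = api.test_down_link_converter_using_post(body=body)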
def test_up_link_converter_using_post(self, **kwargs): # noqa: E501
"""Test converter function (testUpLinkConverter) # noqa: E501
Returns a JSON object representing the result of the processed incoming message. ## Request Body Example ```json { \"metadata\":{ }, \"payload\":\"ewogICAgImRhdGEiOiAiZGF0YSIKfQ==\", \"decoder\":\"// Decode an uplink message from a buffer\\n// payload - array of bytes\\n// metadata - key/value object\\n\\n/** Decoder **/\\n\\n// decode payload to string\\nvar payloadStr = decodeToString(payload);\\n\\n// decode payload to JSON\\n// var data = decodeToJson(payload);\\n\\nvar deviceName = 'Device A';\\nvar deviceType = 'thermostat';\\nvar customerName = 'customer';\\nvar groupName = 'thermostat devices';\\n// use assetName and assetType instead of deviceName and deviceType\\n// to automatically create assets instead of devices.\\n// var assetName = 'Asset A';\\n// var assetType = 'building';\\n\\n// Result object with device/asset attributes/telemetry data\\nvar result = {\\n// Use deviceName and deviceType or assetName and assetType, but not both.\\n deviceName: deviceName,\\n deviceType: deviceType,\\n// assetName: assetName,\\n// assetType: assetType,\\n customerName: customerName,\\n groupName: groupName,\\n attributes: {\\n model: 'Model A',\\n serialNumber: 'SN111',\\n integrationName: metadata['integrationName']\\n },\\n telemetry: {\\n temperature: 42,\\n humidity: 80,\\n rawData: payloadStr\\n }\\n};\\n\\n/** Helper functions **/\\n\\nfunction decodeToString(payload) {\\n return String.fromCharCode.apply(String, payload);\\n}\\n\\nfunction decodeToJson(payload) {\\n // covert payload to string.\\n var str = decodeToString(payload);\\n\\n // parse string to JSON\\n var data = JSON.parse(str);\\n return data;\\n}\\n\\nreturn result;\" } ``` * 'metadata' - integration metadata; * 'payload' - base64 string representation of the data; * 'decoder' - string representation of the decoder configuration. ## Response Body Example ```json { \"output\":\"{\\\"deviceName\\\":\\\"Device A\\\",\\\"deviceType\\\":\\\"thermostat\\\",\\\"customerName\\\":\\\"customer\\\",\\\"groupName\\\":\\\"thermostat devices\\\",\\\"attributes\\\":{\\\"model\\\":\\\"Model A\\\",\\\"serialNumber\\\":\\\"SN111\\\"},\\\"telemetry\\\":{\\\"temperature\\\":42,\\\"humidity\\\":80,\\\"rawData\\\":\\\"{\\\\n \\\\\\\"data\\\\\\\": \\\\\\\"data\\\\\\\"\\\\n}\\\"}}\", \"error\":\"\" } ``` * 'output' - string representation of the output message; * 'error' - string representation of the error message. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_up_link_converter_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param JsonNode body:
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.test_up_link_converter_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_up_link_converter_using_post_with_http_info(**kwargs) # noqa: E501
return data
def test_up_link_converter_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Test converter function (testUpLinkConverter) # noqa: E501
Returns a JSON object representing the result of the processed incoming message. ## Request Body Example ```json { \"metadata\":{ }, \"payload\":\"ewogICAgImRhdGEiOiAiZGF0YSIKfQ==\", \"decoder\":\"// Decode an uplink message from a buffer\\n// payload - array of bytes\\n// metadata - key/value object\\n\\n/** Decoder **/\\n\\n// decode payload to string\\nvar payloadStr = decodeToString(payload);\\n\\n// decode payload to JSON\\n// var data = decodeToJson(payload);\\n\\nvar deviceName = 'Device A';\\nvar deviceType = 'thermostat';\\nvar customerName = 'customer';\\nvar groupName = 'thermostat devices';\\n// use assetName and assetType instead of deviceName and deviceType\\n// to automatically create assets instead of devices.\\n// var assetName = 'Asset A';\\n// var assetType = 'building';\\n\\n// Result object with device/asset attributes/telemetry data\\nvar result = {\\n// Use deviceName and deviceType or assetName and assetType, but not both.\\n deviceName: deviceName,\\n deviceType: deviceType,\\n// assetName: assetName,\\n// assetType: assetType,\\n customerName: customerName,\\n groupName: groupName,\\n attributes: {\\n model: 'Model A',\\n serialNumber: 'SN111',\\n integrationName: metadata['integrationName']\\n },\\n telemetry: {\\n temperature: 42,\\n humidity: 80,\\n rawData: payloadStr\\n }\\n};\\n\\n/** Helper functions **/\\n\\nfunction decodeToString(payload) {\\n return String.fromCharCode.apply(String, payload);\\n}\\n\\nfunction decodeToJson(payload) {\\n // covert payload to string.\\n var str = decodeToString(payload);\\n\\n // parse string to JSON\\n var data = JSON.parse(str);\\n return data;\\n}\\n\\nreturn result;\" } ``` * 'metadata' - integration metadata; * 'payload' - base64 string representation of the data; * 'decoder' - string representation of the decoder configuration. ## Response Body Example ```json { \"output\":\"{\\\"deviceName\\\":\\\"Device A\\\",\\\"deviceType\\\":\\\"thermostat\\\",\\\"customerName\\\":\\\"customer\\\",\\\"groupName\\\":\\\"thermostat devices\\\",\\\"attributes\\\":{\\\"model\\\":\\\"Model A\\\",\\\"serialNumber\\\":\\\"SN111\\\"},\\\"telemetry\\\":{\\\"temperature\\\":42,\\\"humidity\\\":80,\\\"rawData\\\":\\\"{\\\\n \\\\\\\"data\\\\\\\": \\\\\\\"data\\\\\\\"\\\\n}\\\"}}\", \"error\":\"\" } ``` * 'output' - string representation of the output message; * 'error' - string representation of the error message. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_up_link_converter_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param JsonNode body:
:return: JsonNode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_up_link_converter_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/converter/testUpLink', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='JsonNode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
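# ---------------------------------------------------------------------------
# End-to-end sketch (an addition, not generated code). A bare ApiClient() is
# unauthenticated; in practice tb_rest_client's RestClientPE wrapper performs
# the login and attaches the X-Authorization JWT header named in auth_settings
# above, so treat this as a minimal outline rather than a working login flow.
# The payload follows the Request Body Example of
# test_up_link_converter_using_post.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import base64
    api = ConverterControllerApi()  # assumes an already-authenticated ApiClient
    body = {
        'metadata': {},
        'payload': base64.b64encode(b'{"data": "data"}').decode(),
        'decoder': "var payloadStr = String.fromCharCode.apply(String, payload);"
                   " return {deviceName: 'Device A', deviceType: 'thermostat',"
                   " telemetry: {rawData: payloadStr}};",
    }
    print(api.test_up_link_converter_using_post(body=body))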
| 75.601966
| 6,368
| 0.648391
| 7,326
| 61,540
| 5.292656
| 0.066885
| 0.024346
| 0.011554
| 0.014855
| 0.974287
| 0.967581
| 0.964254
| 0.958426
| 0.954712
| 0.948367
| 0
| 0.016023
| 0.221124
| 61,540
| 813
| 6,369
| 75.694957
| 0.792915
| 0.626292
| 0
| 0.78291
| 0
| 0
| 0.190305
| 0.06027
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039261
| false
| 0
| 0.009238
| 0
| 0.106236
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
539a4c5f368cd6f5a9830b039fc11233433aa006
| 6,930
|
py
|
Python
|
usb_device_fuzzer.py
|
Xen1thLabs-AE/LLP_Fuzzer
|
8c19ead0033f2a63099125ad005f73e2a0fea6a3
|
[
"MIT"
] | null | null | null |
usb_device_fuzzer.py
|
Xen1thLabs-AE/LLP_Fuzzer
|
8c19ead0033f2a63099125ad005f73e2a0fea6a3
|
[
"MIT"
] | null | null | null |
usb_device_fuzzer.py
|
Xen1thLabs-AE/LLP_Fuzzer
|
8c19ead0033f2a63099125ad005f73e2a0fea6a3
|
[
"MIT"
] | 1
|
2022-03-28T08:27:21.000Z
|
2022-03-28T08:27:21.000Z
|
# Modules used for USB Fuzzer
import usb.core
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--idVendor',
required=True,
help="Enter the vendor id(Vid) of USB target device")
parser.add_argument('-p', '--idProduct',
required=True,
help="Enter the product id(Pid) of USB target device")
args1 = parser.parse_args()
# Code to fuzz the USB device target in microcontrollers
if(args1.idVendor):
args1.idVendor=int(args1.idVendor,16)
if(args1.idProduct):
args1.idProduct=int(args1.idProduct,16)
dev = usb.core.find(idVendor=args1.idVendor, idProduct=args1.idProduct)
interface = 0
if dev.is_kernel_driver_active(interface) is True:
# tell the kernel to detach
dev.detach_kernel_driver(interface)
# claim the device
usb.util.claim_interface(dev, interface)
lang_id = 0x0
length =0xfff
size = 40
print("Fuzzing Started")
print("Fuzzing Get Status with values 0x80 started")
for i in range (0,65536,1):
try:
send = dev.ctrl_transfer(0x80, 0, i, lang_id, length)
if len(send) >= size:
print("Request Sent:", 0x80, 0, i, lang_id, length, "Received: " "Size :", len(send), str(send))
except:
pass
print("Fuzzing Get Status with values 0x80 completed")
print("Fuzzing Get Status with values 0x81 started")
for i in range (0,65535,1):
try:
send = dev.ctrl_transfer(0x81,0,i,lang_id,length)
if len(send) >= size:
print("Request Sent:", 0x81,0,i,lang_id,length , "Received: " "Size :", len(send), str(send))
except:
pass
print("Fuzzing Get Status with values 0x81 completed")
print("Fuzzing Get Status with values 0x82 Started")
for i in range(0, 65536):  # sweep the full 16-bit wValue range
    try:
        send = dev.ctrl_transfer(0x82, 0, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x82, 0, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Status with values 0x82 completed")
print("Fuzzing Get Status with values 0x83 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x83, 0, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x83, 0, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Status with values 0x83 completed")
print("Fuzzing Get Descriptor with values 0x80 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x80, 6, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x80, 6, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Descriptor with values 0x80 completed")
print("Fuzzing Get Descriptor with values 0x81 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x81, 6, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x81, 6, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Descriptor with values 0x81 completed")
print("Fuzzing Get Descriptor with values 0x82 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x82, 6, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x82, 6, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Descriptor with values 0x82 completed")
print("Fuzzing Get Descriptor with values 0x83 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x83, 6, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x83, 6, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Descriptor with values 0x83 completed")
print("Fuzzing Get Configuration with values 0x80 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x80, 8, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x80, 8, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Configuration with values 0x80 completed")
print("Fuzzing Get Configuration with values 0x81 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x81, 8, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x81, 8, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Configuration with values 0x81 completed")
print("Fuzzing Get Configuration with values 0x82 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x82, 8, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x82, 8, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Configuration with values 0x82 completed")
print("Fuzzing Get Configuration with values 0x83 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x83, 8, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x83, 8, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Configuration with values 0x83 completed")
print("Fuzzing Get Interface with values 0x80 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x80, 10, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x80, 10, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Interface with values 0x80 completed")
print("Fuzzing Get Interface with values 0x81 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x81, 10, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x81, 10, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Interface with values 0x81 completed")
print("Fuzzing Get Interface with values 0x82 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x82, 10, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x82, 10, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Interface with values 0x82 completed")
print("Fuzzing Get Interface with values 0x83 Started")
for i in range(0, 65536):
    try:
        send = dev.ctrl_transfer(0x83, 10, i, lang_id, length)
        if len(send) >= size:
            print("Request Sent:", 0x83, 10, i, lang_id, length, "Received Size:", len(send), str(send))
    except Exception:
        pass
print("Fuzzing Get Interface with values 0x83 completed")
| 34.65
| 108
| 0.644589
| 1,010
| 6,930
| 4.365347
| 0.091089
| 0.044908
| 0.108868
| 0.094352
| 0.858698
| 0.847811
| 0.847811
| 0.707643
| 0.707643
| 0.700159
| 0
| 0.067422
| 0.223088
| 6,930
| 199
| 109
| 34.824121
| 0.751486
| 0.018038
| 0
| 0.482143
| 0
| 0
| 0.311258
| 0
| 0
| 0
| 0.038852
| 0
| 0
| 1
| 0
| false
| 0.095238
| 0.017857
| 0
| 0.017857
| 0.291667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 7
| 53d8644dff5b1918aa13b212920f6fb56e01dbf7
| 17,973
| py
| Python
| src/whop/whopclient/api/products_api.py
| whopio/whop-python-sdk
| 9b4da585bf81065a9a435cf6651d9a0cd206088c
| ["MIT"] | null | null | null |
src/whop/whopclient/api/products_api.py
| whopio/whop-python-sdk
| 9b4da585bf81065a9a435cf6651d9a0cd206088c
| ["MIT"] | null | null | null |
src/whop/whopclient/api/products_api.py
| whopio/whop-python-sdk
| 9b4da585bf81065a9a435cf6651d9a0cd206088c
| ["MIT"] | null | null | null |
"""
Whop API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.10
Contact: support@whop.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from whop.whopclient.api_client import ApiClient, Endpoint as _Endpoint
from whop.whopclient.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from whop.whopclient.model.confirm_product_request import ConfirmProductRequest
from whop.whopclient.model.confirm_product_response import ConfirmProductResponse
from whop.whopclient.model.create_product_request import CreateProductRequest
from whop.whopclient.model.create_product_response import CreateProductResponse
from whop.whopclient.model.error_response import ErrorResponse
from whop.whopclient.model.get_product_by_id_response import GetProductByIdResponse
from whop.whopclient.model.get_products_response import GetProductsResponse
class ProductsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.confirm_product_endpoint = _Endpoint(
settings={
'response_type': (ConfirmProductResponse,),
'auth': [
'Bearer'
],
'endpoint_path': '/v1/confirm_product',
'operation_id': 'confirm_product',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'confirm_product_request',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'confirm_product_request':
(ConfirmProductRequest,),
},
'attribute_map': {
},
'location_map': {
'confirm_product_request': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.create_product_endpoint = _Endpoint(
settings={
'response_type': (CreateProductResponse,),
'auth': [
'Bearer'
],
'endpoint_path': '/v1/products',
'operation_id': 'create_product',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'create_product_request',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'create_product_request':
(CreateProductRequest,),
},
'attribute_map': {
},
'location_map': {
'create_product_request': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.get_product_by_id_endpoint = _Endpoint(
settings={
'response_type': (GetProductByIdResponse,),
'auth': [
'Bearer'
],
'endpoint_path': '/v1/products/{id}',
'operation_id': 'get_product_by_id',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_products_endpoint = _Endpoint(
settings={
'response_type': (GetProductsResponse,),
'auth': [
'Bearer'
],
'endpoint_path': '/v1/products',
'operation_id': 'get_products',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
def confirm_product(
self,
**kwargs
):
"""Product Creation Confirmation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.confirm_product(async_req=True)
>>> result = thread.get()
Keyword Args:
confirm_product_request (ConfirmProductRequest): Click the arrow to the right to see the full body. [optional]
_return_http_data_only (bool): response data only, without the status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ConfirmProductResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.confirm_product_endpoint.call_with_http_info(**kwargs)
def create_product(
self,
**kwargs
):
"""Create Product # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_product(async_req=True)
>>> result = thread.get()
Keyword Args:
create_product_request (CreateProductRequest): Click the arrow to the right to see the full body. [optional]
_return_http_data_only (bool): response data only, without the status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CreateProductResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.create_product_endpoint.call_with_http_info(**kwargs)
def get_product_by_id(
self,
id,
**kwargs
):
"""Fetch Product # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_product_by_id(id, async_req=True)
>>> result = thread.get()
Args:
id (int): ID of the product you wish to fetch.
Keyword Args:
_return_http_data_only (bool): response data only, without the status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GetProductByIdResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.get_product_by_id_endpoint.call_with_http_info(**kwargs)
def get_products(
self,
**kwargs
):
"""Fetch All Products # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_products(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data only, without the status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GetProductsResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.get_products_endpoint.call_with_http_info(**kwargs)
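A minimal usage sketch for the class above, assuming the standard openapi-generator `Configuration` class generated alongside it (module path assumed); the Bearer token is a placeholder:

from whop.whopclient.api_client import ApiClient
from whop.whopclient.configuration import Configuration

configuration = Configuration(access_token="YOUR_BEARER_TOKEN")  # placeholder token
with ApiClient(configuration) as api_client:
    # ProductsApi is defined above; get_products issues GET /v1/products
    products = ProductsApi(api_client).get_products()
    print(products)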
| 35.379921
| 124
| 0.51416
| 1,683
| 17,973
| 5.249554
| 0.117053
| 0.032598
| 0.023543
| 0.024448
| 0.806338
| 0.788002
| 0.74635
| 0.74635
| 0.711941
| 0.691794
| 0
| 0.003358
| 0.403494
| 17,973
| 507
| 125
| 35.449704
| 0.820726
| 0.374339
| 0
| 0.582822
| 1
| 0
| 0.214413
| 0.039047
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015337
| false
| 0
| 0.033742
| 0
| 0.064417
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 53db7d65a8a9f290e9c29e9f6221ea36d00f0e25
| 31,098
| py
| Python
| gcloud/datastore/test_connection.py
| olala7846/gcloud-python
| f33e557f94f304b38049b5f9b5082ed58dcf295a
| ["Apache-2.0"] | null | null | null |
gcloud/datastore/test_connection.py
| olala7846/gcloud-python
| f33e557f94f304b38049b5f9b5082ed58dcf295a
| ["Apache-2.0"] | null | null | null |
gcloud/datastore/test_connection.py
| olala7846/gcloud-python
| f33e557f94f304b38049b5f9b5082ed58dcf295a
| ["Apache-2.0"] | null | null | null |
import unittest2
class TestConnection(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.datastore.connection import Connection
return Connection
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_defaults(self):
conn = self._makeOne()
self.assertEqual(conn.credentials, None)
def test_ctor_explicit(self):
creds = object()
conn = self._makeOne(creds)
self.assertTrue(conn.credentials is creds)
def test_http_w_existing(self):
conn = self._makeOne()
conn._http = http = object()
self.assertTrue(conn.http is http)
def test_http_wo_creds(self):
import httplib2
conn = self._makeOne()
self.assertTrue(isinstance(conn.http, httplib2.Http))
def test_http_w_creds(self):
import httplib2
authorized = object()
class Creds(object):
def authorize(self, http):
self._called_with = http
return authorized
creds = Creds()
conn = self._makeOne(creds)
self.assertTrue(conn.http is authorized)
self.assertTrue(isinstance(creds._called_with, httplib2.Http))
def test__request_w_200(self):
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
DATA = b'DATA'
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
METHOD,
])
http = conn._http = Http({'status': '200'}, 'CONTENT')
self.assertEqual(conn._request(DATASET_ID, METHOD, DATA), 'CONTENT')
self.assertEqual(http._called_with['uri'], URI)
self.assertEqual(http._called_with['method'], 'POST')
self.assertEqual(http._called_with['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(http._called_with['headers']['User-Agent'],
conn.USER_AGENT)
self.assertEqual(http._called_with['body'], DATA)
def test__request_not_200(self):
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
DATA = 'DATA'
conn = self._makeOne()
conn._http = Http({'status': '400'}, 'Bad Request')
with self.assertRaises(Exception) as e:
conn._request(DATASET_ID, METHOD, DATA)
self.assertEqual(str(e.exception),
'Request failed. Error was: Bad Request')
def test__rpc(self):
class ReqPB(object):
def SerializeToString(self):
return REQPB
class RspPB(object):
def __init__(self, pb):
self._pb = pb
@classmethod
def FromString(cls, pb):
return cls(pb)
REQPB = b'REQPB'
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
METHOD,
])
http = conn._http = Http({'status': '200'}, 'CONTENT')
response = conn._rpc(DATASET_ID, METHOD, ReqPB(), RspPB)
self.assertTrue(isinstance(response, RspPB))
self.assertEqual(response._pb, 'CONTENT')
self.assertEqual(http._called_with['uri'], URI)
self.assertEqual(http._called_with['method'], 'POST')
self.assertEqual(http._called_with['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(http._called_with['headers']['User-Agent'],
conn.USER_AGENT)
self.assertEqual(http._called_with['body'], REQPB)
def test_build_api_url_w_default_base_version(self):
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
klass = self._getTargetClass()
URI = '/'.join([
klass.API_BASE_URL,
'datastore',
klass.API_VERSION,
'datasets',
DATASET_ID,
METHOD,
])
self.assertEqual(klass.build_api_url(DATASET_ID, METHOD), URI)
def test_build_api_url_w_explicit_base_version(self):
BASE = 'http://example.com/'
VER = '3.1415926'
DATASET_ID = 'DATASET'
METHOD = 'METHOD'
klass = self._getTargetClass()
URI = '/'.join([
BASE,
'datastore',
VER,
'datasets',
DATASET_ID,
METHOD,
])
self.assertEqual(klass.build_api_url(DATASET_ID, METHOD, BASE, VER),
URI)
def test_transaction_getter_unset(self):
conn = self._makeOne()
self.assertTrue(conn.transaction() is None)
def test_transaction_setter(self):
xact = object()
conn = self._makeOne()
self.assertTrue(conn.transaction(xact) is conn)
self.assertTrue(conn.transaction() is xact)
def test_mutation_wo_transaction(self):
from gcloud._testing import _Monkey
from gcloud.datastore.connection import datastore_pb
class Mutation(object):
pass
conn = self._makeOne()
with _Monkey(datastore_pb, Mutation=Mutation):
found = conn.mutation()
self.assertTrue(isinstance(found, Mutation))
def test_mutation_w_transaction(self):
class Mutation(object):
pass
class Xact(object):
def mutation(self):
return Mutation()
conn = self._makeOne()
conn.transaction(Xact())
found = conn.mutation()
self.assertTrue(isinstance(found, Mutation))
def test_dataset(self):
DATASET_ID = 'DATASET'
conn = self._makeOne()
dataset = conn.dataset(DATASET_ID)
self.assertTrue(dataset.connection() is conn)
self.assertEqual(dataset.id(), DATASET_ID)
def test_lookup_single_key_empty_response(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.LookupResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.lookup(DATASET_ID, key_pb), None)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
keys = list(request.key)
self.assertEqual(len(keys), 1)
self.assertEqual(keys[0], key_pb)
def test_lookup_single_key_nonempty_response(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.LookupResponse()
entity = datastore_pb.Entity()
entity.key.CopyFrom(key_pb)
rsp_pb.found.add(entity=entity)
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
found = conn.lookup(DATASET_ID, key_pb)
self.assertEqual(found.key.path_element[0].kind, 'Kind')
self.assertEqual(found.key.path_element[0].id, 1234)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
keys = list(request.key)
self.assertEqual(len(keys), 1)
self.assertEqual(keys[0], key_pb)
def test_lookup_multiple_keys_empty_response(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
rsp_pb = datastore_pb.LookupResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.lookup(DATASET_ID, [key_pb1, key_pb2]), [])
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
keys = list(request.key)
self.assertEqual(len(keys), 2)
self.assertEqual(keys[0], key_pb1)
self.assertEqual(keys[1], key_pb2)
def test_run_query_wo_namespace_empty_result(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.query import Query
DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
q_pb = Query(KIND, DATASET_ID).to_protobuf()
rsp_pb = datastore_pb.RunQueryResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs, end, more, skipped = conn.run_query(DATASET_ID, q_pb)
self.assertEqual(pbs, [])
self.assertEqual(end, '')
self.assertTrue(more)
self.assertEqual(skipped, 0)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.partition_id.namespace, '')
self.assertEqual(request.query, q_pb)
def test_run_query_w_namespace_nonempty_result(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.query import Query
DATASET_ID = 'DATASET'
KIND = 'Kind'
entity = datastore_pb.Entity()
q_pb = Query(KIND, DATASET_ID).to_protobuf()
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.entity_result.add(entity=entity)
rsp_pb.batch.entity_result_type = 1 # FULL
rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs = conn.run_query(DATASET_ID, q_pb, 'NS')[0]
self.assertEqual(len(pbs), 1)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.partition_id.namespace, 'NS')
self.assertEqual(request.query, q_pb)
def test_begin_transaction_w_existing_transaction(self):
DATASET_ID = 'DATASET'
conn = self._makeOne()
conn.transaction(object())
self.assertRaises(ValueError, conn.begin_transaction, DATASET_ID)
def test_begin_transaction_default_serialize(self):
from gcloud.datastore.connection import datastore_pb
DATASET_ID = 'DATASET'
TRANSACTION = 'TRANSACTION'
rsp_pb = datastore_pb.BeginTransactionResponse()
rsp_pb.transaction = TRANSACTION
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'beginTransaction',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.begin_transaction(DATASET_ID), TRANSACTION)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.BeginTransactionRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.isolation_level, rq_class.SNAPSHOT)
def test_begin_transaction_explicit_serialize(self):
from gcloud.datastore.connection import datastore_pb
DATASET_ID = 'DATASET'
TRANSACTION = 'TRANSACTION'
rsp_pb = datastore_pb.BeginTransactionResponse()
rsp_pb.transaction = TRANSACTION
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'beginTransaction',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.begin_transaction(DATASET_ID, True), TRANSACTION)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.BeginTransactionRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.isolation_level, rq_class.SERIALIZABLE)
def test_commit_wo_transaction(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
mutation = datastore_pb.Mutation()
insert = mutation.upsert.add()
insert.key.CopyFrom(key_pb)
prop = insert.property.add()
prop.name = 'foo'
prop.value.string_value = u'Foo'
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.commit(DATASET_ID, mutation)
self.assertEqual(result.index_updates, 0)
self.assertEqual(list(result.insert_auto_id_key), [])
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, '')
self.assertEqual(request.mutation, mutation)
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
def test_commit_w_transaction(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
class Xact(object):
def id(self):
return 'xact'
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
mutation = datastore_pb.Mutation()
insert = mutation.upsert.add()
insert.key.CopyFrom(key_pb)
prop = insert.property.add()
prop.name = 'foo'
prop.value.string_value = u'Foo'
conn = self._makeOne()
conn.transaction(Xact())
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.commit(DATASET_ID, mutation)
self.assertEqual(result.index_updates, 0)
self.assertEqual(list(result.insert_auto_id_key), [])
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, 'xact')
self.assertEqual(request.mutation, mutation)
self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
def test_rollback_wo_existing_transaction(self):
DATASET_ID = 'DATASET'
conn = self._makeOne()
self.assertRaises(ValueError,
conn.rollback, DATASET_ID)
def test_rollback_w_existing_transaction_no_id(self):
class Xact(object):
def id(self):
return None
DATASET_ID = 'DATASET'
conn = self._makeOne()
conn.transaction(Xact())
self.assertRaises(ValueError,
conn.rollback, DATASET_ID)
def test_rollback_ok(self):
from gcloud.datastore.connection import datastore_pb
DATASET_ID = 'DATASET'
TRANSACTION = 'xact'
class Xact(object):
def id(self):
return TRANSACTION
rsp_pb = datastore_pb.RollbackResponse()
conn = self._makeOne()
conn.transaction(Xact())
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'rollback',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.rollback(DATASET_ID), None)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.RollbackRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, TRANSACTION)
def test_allocate_ids_empty(self):
from gcloud.datastore.connection import datastore_pb
DATASET_ID = 'DATASET'
rsp_pb = datastore_pb.AllocateIdsResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'allocateIds',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.allocate_ids(DATASET_ID, []), [])
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.AllocateIdsRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(list(request.key), [])
def test_allocate_ids_non_empty(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
before_key_pbs = [
Key(path=[{'kind': 'Kind'}]).to_protobuf(),
Key(path=[{'kind': 'Kind'}]).to_protobuf(),
]
after_key_pbs = [
Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf(),
Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf(),
]
rsp_pb = datastore_pb.AllocateIdsResponse()
rsp_pb.key.add().CopyFrom(after_key_pbs[0])
rsp_pb.key.add().CopyFrom(after_key_pbs[1])
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'allocateIds',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.allocate_ids(DATASET_ID, before_key_pbs),
after_key_pbs)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.AllocateIdsRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(list(request.key), before_key_pbs)
def test_save_entity_wo_transaction_w_upsert(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, True)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, '')
mutation = request.mutation
self.assertEqual(len(mutation.insert_auto_id), 0)
upserts = list(mutation.upsert)
self.assertEqual(len(upserts), 1)
upsert = upserts[0]
self.assertEqual(upsert.key, key_pb)
props = list(upsert.property)
self.assertEqual(len(props), 1)
self.assertEqual(props[0].name, 'foo')
self.assertEqual(props[0].value.string_value, u'Foo')
self.assertEqual(len(mutation.delete), 0)
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
def test_save_entity_wo_transaction_w_auto_id(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind'}]).to_protobuf()
updated_key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
mr_pb = rsp_pb.mutation_result
mr_pb.index_updates = 0
iaik_pb = mr_pb.insert_auto_id_key.add()
iaik_pb.CopyFrom(updated_key_pb)
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, updated_key_pb)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, '')
mutation = request.mutation
inserts = list(mutation.insert_auto_id)
insert = inserts[0]
self.assertEqual(insert.key, key_pb)
props = list(insert.property)
self.assertEqual(len(props), 1)
self.assertEqual(props[0].name, 'foo')
self.assertEqual(props[0].value.string_value, u'Foo')
self.assertEqual(len(inserts), 1)
upserts = list(mutation.upsert)
self.assertEqual(len(upserts), 0)
self.assertEqual(len(mutation.delete), 0)
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
def test_save_entity_w_transaction(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
mutation = datastore_pb.Mutation()
class Xact(object):
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
conn = self._makeOne()
conn.transaction(Xact())
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, True)
self.assertEqual(http._called_with, None)
mutation = conn.mutation()
self.assertEqual(len(mutation.upsert), 1)
def test_save_entity_w_transaction_nested_entity(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.entity import Entity
from gcloud.datastore.key import Key
mutation = datastore_pb.Mutation()
class Xact(object):
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
nested = Entity()
nested['bar'] = u'Bar'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
conn = self._makeOne()
conn.transaction(Xact())
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': nested})
self.assertEqual(result, True)
self.assertEqual(http._called_with, None)
mutation = conn.mutation()
self.assertEqual(len(mutation.upsert), 1)
def test_delete_entities_wo_transaction(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
conn = self._makeOne()
URI = '/'.join([
conn.API_BASE_URL,
'datastore',
conn.API_VERSION,
'datasets',
DATASET_ID,
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.delete_entities(DATASET_ID, [key_pb])
self.assertEqual(result, True)
cw = http._called_with
self.assertEqual(cw['uri'], URI)
self.assertEqual(cw['method'], 'POST')
self.assertEqual(cw['headers']['Content-Type'],
'application/x-protobuf')
self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, '')
mutation = request.mutation
self.assertEqual(len(mutation.insert_auto_id), 0)
self.assertEqual(len(mutation.upsert), 0)
deletes = list(mutation.delete)
self.assertEqual(len(deletes), 1)
delete = deletes[0]
self.assertEqual(delete, key_pb)
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
def test_delete_entities_w_transaction(self):
from gcloud.datastore.connection import datastore_pb
from gcloud.datastore.key import Key
mutation = datastore_pb.Mutation()
class Xact(object):
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
rsp_pb = datastore_pb.CommitResponse()
conn = self._makeOne()
conn.transaction(Xact())
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.delete_entities(DATASET_ID, [key_pb])
self.assertEqual(result, True)
self.assertEqual(http._called_with, None)
mutation = conn.mutation()
self.assertEqual(len(mutation.delete), 1)
class Http(object):
_called_with = None
def __init__(self, headers, content):
self._headers = headers
self._content = content
def request(self, **kw):
self._called_with = kw
return self._headers, self._content
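`Http` is a recording fake: `request()` captures its keyword arguments in `_called_with` and returns the canned headers and content, which is what the tests above assert against. A standalone sketch of the pattern:

http = Http({'status': '200'}, 'CONTENT')
headers, content = http.request(uri='http://example.com', method='POST')
assert headers == {'status': '200'}
assert content == 'CONTENT'
assert http._called_with == {'uri': 'http://example.com', 'method': 'POST'}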
| 37.243114
| 79
| 0.593382
| 3,351
| 31,098
| 5.292748
| 0.060281
| 0.128552
| 0.05751
| 0.040595
| 0.840719
| 0.815065
| 0.794599
| 0.76652
| 0.743009
| 0.732691
| 0
| 0.008467
| 0.282205
| 31,098
| 834
| 80
| 37.28777
| 0.786085
| 0.000643
| 0
| 0.747004
| 0
| 0
| 0.081673
| 0.012035
| 0
| 0
| 0
| 0
| 0.225033
| 1
| 0.067909
| false
| 0.002663
| 0.051931
| 0.013316
| 0.157124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 53f2486a39bae775f9a56605406f7a218fba128d
| 44,890
| py
| Python
| tensorflow_estimator/python/estimator/canned/dnn_linear_combined_test.py
| jquadrino/estimator
| b9d599e4f82374be43a9a8a2dcdca34968ddde48
| ["Apache-2.0"] | null | null | null |
tensorflow_estimator/python/estimator/canned/dnn_linear_combined_test.py
| jquadrino/estimator
| b9d599e4f82374be43a9a8a2dcdca34968ddde48
| ["Apache-2.0"] | null | null | null |
tensorflow_estimator/python/estimator/canned/dnn_linear_combined_test.py
| jquadrino/estimator
| b9d599e4f82374be43a9a8a2dcdca34968ddde48
| ["Apache-2.0"] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dnn_linear_combined.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
from absl.testing import parameterized
import numpy as np
import six
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.feature_column import feature_column
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import input as input_lib
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow_estimator.python.estimator import estimator
from tensorflow_estimator.python.estimator.canned import dnn_linear_combined
from tensorflow_estimator.python.estimator.canned import dnn_testing_utils
from tensorflow_estimator.python.estimator.canned import linear_testing_utils
from tensorflow_estimator.python.estimator.canned import prediction_keys
from tensorflow_estimator.python.estimator.export import export
from tensorflow_estimator.python.estimator.inputs import numpy_io
from tensorflow_estimator.python.estimator.inputs import pandas_io
try:
# pylint: disable=g-import-not-at-top
import pandas as pd
HAS_PANDAS = True
except IOError:
# Pandas writes a temporary file during import. If it fails, don't use pandas.
HAS_PANDAS = False
except ImportError:
HAS_PANDAS = False
# This is so that we can easily switch between feature_column and
# feature_column_v2 for testing.
feature_column.numeric_column = feature_column._numeric_column
feature_column.categorical_column_with_hash_bucket = feature_column._categorical_column_with_hash_bucket # pylint: disable=line-too-long
feature_column.categorical_column_with_vocabulary_list = feature_column._categorical_column_with_vocabulary_list # pylint: disable=line-too-long
feature_column.categorical_column_with_vocabulary_file = feature_column._categorical_column_with_vocabulary_file # pylint: disable=line-too-long
feature_column.embedding_column = feature_column._embedding_column
class DNNOnlyModelFnTest(dnn_testing_utils.BaseDNNModelFnTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNModelFnTest.__init__(self, self._dnn_only_model_fn)
def _dnn_only_model_fn(self,
features,
labels,
mode,
head,
hidden_units,
feature_columns,
optimizer='Adagrad',
activation_fn=nn.relu,
dropout=None,
input_layer_partitioner=None,
config=None):
return dnn_linear_combined._dnn_linear_combined_model_fn(
features=features,
labels=labels,
mode=mode,
head=head,
linear_feature_columns=[],
dnn_hidden_units=hidden_units,
dnn_feature_columns=feature_columns,
dnn_optimizer=optimizer,
dnn_activation_fn=activation_fn,
dnn_dropout=dropout,
input_layer_partitioner=input_layer_partitioner,
config=config)
# A function to mimic linear-regressor init so the same tests can be reused.
def _linear_regressor_fn(feature_columns,
model_dir=None,
label_dimension=1,
weight_column=None,
optimizer='Ftrl',
config=None,
partitioner=None,
sparse_combiner='sum'):
return dnn_linear_combined.DNNLinearCombinedRegressorV2(
model_dir=model_dir,
linear_feature_columns=feature_columns,
linear_optimizer=optimizer,
label_dimension=label_dimension,
weight_column=weight_column,
input_layer_partitioner=partitioner,
config=config,
linear_sparse_combiner=sparse_combiner)
class LinearOnlyRegressorPartitionerTest(
linear_testing_utils.BaseLinearRegressorPartitionerTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorPartitionerTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column)
class LinearOnlyRegressorPartitionerV2Test(
linear_testing_utils.BaseLinearRegressorPartitionerTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorPartitionerTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column_v2)
class LinearOnlyRegressorEvaluationTest(
linear_testing_utils.BaseLinearRegressorEvaluationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorEvaluationTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column)
class LinearOnlyRegressorEvaluationV2Test(
linear_testing_utils.BaseLinearRegressorEvaluationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorEvaluationTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column_v2)
class LinearOnlyRegressorPredictTest(
linear_testing_utils.BaseLinearRegressorPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorPredictTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column)
class LinearOnlyRegressorPredictV2Test(
linear_testing_utils.BaseLinearRegressorPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorPredictTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column_v2)
class LinearOnlyRegressorIntegrationTest(
linear_testing_utils.BaseLinearRegressorIntegrationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorIntegrationTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column)
class LinearOnlyRegressorIntegrationV2Test(
linear_testing_utils.BaseLinearRegressorIntegrationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorIntegrationTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column_v2)
class LinearOnlyRegressorTrainingTest(
linear_testing_utils.BaseLinearRegressorTrainingTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorTrainingTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column)
class LinearOnlyRegressorTrainingV2Test(
linear_testing_utils.BaseLinearRegressorTrainingTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearRegressorTrainingTest.__init__(
self, _linear_regressor_fn, fc_lib=feature_column_v2)
def _linear_classifier_fn(feature_columns,
model_dir=None,
n_classes=2,
weight_column=None,
label_vocabulary=None,
optimizer='Ftrl',
config=None,
partitioner=None,
sparse_combiner='sum'):
return dnn_linear_combined.DNNLinearCombinedClassifierV2(
model_dir=model_dir,
linear_feature_columns=feature_columns,
linear_optimizer=optimizer,
n_classes=n_classes,
weight_column=weight_column,
label_vocabulary=label_vocabulary,
input_layer_partitioner=partitioner,
config=config,
linear_sparse_combiner=sparse_combiner)
class LinearOnlyClassifierTrainingTest(
linear_testing_utils.BaseLinearClassifierTrainingTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierTrainingTest.__init__(
self, linear_classifier_fn=_linear_classifier_fn, fc_lib=feature_column)
class LinearOnlyClassifierTrainingV2Test(
linear_testing_utils.BaseLinearClassifierTrainingTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierTrainingTest.__init__(
self,
linear_classifier_fn=_linear_classifier_fn,
fc_lib=feature_column_v2)
class LinearOnlyClassifierClassesEvaluationTest(
linear_testing_utils.BaseLinearClassifierEvaluationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierEvaluationTest.__init__(
self, linear_classifier_fn=_linear_classifier_fn, fc_lib=feature_column)
class LinearOnlyClassifierClassesEvaluationV2Test(
linear_testing_utils.BaseLinearClassifierEvaluationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierEvaluationTest.__init__(
self,
linear_classifier_fn=_linear_classifier_fn,
fc_lib=feature_column_v2)
class LinearOnlyClassifierPredictTest(
linear_testing_utils.BaseLinearClassifierPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierPredictTest.__init__(
self, linear_classifier_fn=_linear_classifier_fn, fc_lib=feature_column)
class LinearOnlyClassifierPredictV2Test(
linear_testing_utils.BaseLinearClassifierPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierPredictTest.__init__(
self,
linear_classifier_fn=_linear_classifier_fn,
fc_lib=feature_column_v2)
class LinearOnlyClassifierIntegrationTest(
linear_testing_utils.BaseLinearClassifierIntegrationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierIntegrationTest.__init__(
self, linear_classifier_fn=_linear_classifier_fn, fc_lib=feature_column)
class LinearOnlyClassifierIntegrationV2Test(
linear_testing_utils.BaseLinearClassifierIntegrationTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
linear_testing_utils.BaseLinearClassifierIntegrationTest.__init__(
self,
linear_classifier_fn=_linear_classifier_fn,
fc_lib=feature_column_v2)
@parameterized.parameters((feature_column,), (feature_column_v2,))
class DNNLinearCombinedRegressorIntegrationTest(test.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
writer_cache.FileWriterCache.clear()
shutil.rmtree(self._model_dir)
def _test_complete_flow_helper(
self, linear_feature_columns, dnn_feature_columns, feature_spec,
train_input_fn, eval_input_fn, predict_input_fn, input_dimension,
label_dimension, batch_size):
est = dnn_linear_combined.DNNLinearCombinedRegressorV2(
linear_feature_columns=linear_feature_columns,
dnn_hidden_units=(2, 2),
dnn_feature_columns=dnn_feature_columns,
label_dimension=label_dimension,
model_dir=self._model_dir)
# TRAIN
num_steps = 10
est.train(train_input_fn, steps=num_steps)
# EVALUATE
scores = est.evaluate(eval_input_fn)
self.assertEqual(num_steps, scores[ops.GraphKeys.GLOBAL_STEP])
self.assertIn('loss', six.iterkeys(scores))
# PREDICT
predictions = np.array([
x[prediction_keys.PredictionKeys.PREDICTIONS]
for x in est.predict(predict_input_fn)
])
self.assertAllEqual((batch_size, label_dimension), predictions.shape)
# EXPORT
serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
feature_spec)
export_dir = est.export_savedmodel(tempfile.mkdtemp(),
serving_input_receiver_fn)
self.assertTrue(gfile.Exists(export_dir))
def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
input_dimension, label_dimension, batch_size,
fc_impl):
linear_feature_columns = [
fc_impl.numeric_column('x', shape=(input_dimension,))
]
dnn_feature_columns = [
fc_impl.numeric_column('x', shape=(input_dimension,))
]
feature_columns = linear_feature_columns + dnn_feature_columns
feature_spec = feature_column.make_parse_example_spec(feature_columns)
self._test_complete_flow_helper(linear_feature_columns, dnn_feature_columns,
feature_spec, train_input_fn, eval_input_fn,
predict_input_fn, input_dimension,
label_dimension, batch_size)
def _test_complete_flow_mix1(self, train_input_fn, eval_input_fn,
predict_input_fn, input_dimension,
label_dimension, batch_size, fc_impl):
del fc_impl
linear_feature_columns = [
feature_column_v2.numeric_column('x', shape=(input_dimension,))
]
dnn_feature_columns = [
feature_column.numeric_column('x', shape=(input_dimension,))
]
feature_columns = linear_feature_columns + dnn_feature_columns
feature_spec = feature_column.make_parse_example_spec(feature_columns)
self._test_complete_flow_helper(linear_feature_columns, dnn_feature_columns,
feature_spec, train_input_fn, eval_input_fn,
predict_input_fn, input_dimension,
label_dimension, batch_size)
def _test_complete_flow_mix2(self, train_input_fn, eval_input_fn,
predict_input_fn, input_dimension,
label_dimension, batch_size, fc_impl):
del fc_impl
linear_feature_columns = [
feature_column.numeric_column('x', shape=(input_dimension,))
]
dnn_feature_columns = [
feature_column_v2.numeric_column('x', shape=(input_dimension,))
]
feature_columns = linear_feature_columns + dnn_feature_columns
feature_spec = feature_column.make_parse_example_spec(feature_columns)
self._test_complete_flow_helper(linear_feature_columns, dnn_feature_columns,
feature_spec, train_input_fn, eval_input_fn,
predict_input_fn, input_dimension,
label_dimension, batch_size)
def _test_numpy_input_fn_helper(self, fc_impl, fn_to_run):
"""Tests complete flow with numpy_input_fn."""
label_dimension = 2
batch_size = 10
data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
data = data.reshape(batch_size, label_dimension)
# learn y = x
train_input_fn = numpy_io.numpy_input_fn(
x={'x': data},
y=data,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': data},
y=data,
batch_size=batch_size,
shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
x={'x': data},
batch_size=batch_size,
shuffle=False)
fn_to_run(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size,
fc_impl=fc_impl)
def test_numpy_input_fn_basic(self, fc_impl):
self._test_numpy_input_fn_helper(fc_impl, self._test_complete_flow)
def test_numpy_input_fn_mix1(self, fc_impl):
self._test_numpy_input_fn_helper(fc_impl, self._test_complete_flow_mix1)
def test_numpy_input_fn_mix2(self, fc_impl):
self._test_numpy_input_fn_helper(fc_impl, self._test_complete_flow_mix2)
def _test_pandas_input_fn_helper(self, fc_impl, fn_to_run):
"""Tests complete flow with pandas_input_fn."""
if not HAS_PANDAS:
return
label_dimension = 1
batch_size = 10
data = np.linspace(0., 2., batch_size, dtype=np.float32)
x = pd.DataFrame({'x': data})
y = pd.Series(data)
train_input_fn = pandas_io.pandas_input_fn(
x=x,
y=y,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = pandas_io.pandas_input_fn(
x=x,
y=y,
batch_size=batch_size,
shuffle=False)
predict_input_fn = pandas_io.pandas_input_fn(
x=x,
batch_size=batch_size,
shuffle=False)
fn_to_run(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size,
fc_impl=fc_impl)
def test_pandas_input_fn_basic(self, fc_impl):
self._test_pandas_input_fn_helper(fc_impl, self._test_complete_flow)
def test_pandas_input_fn_mix1(self, fc_impl):
self._test_pandas_input_fn_helper(fc_impl, self._test_complete_flow_mix1)
def test_pandas_input_fn_mix2(self, fc_impl):
self._test_pandas_input_fn_helper(fc_impl, self._test_complete_flow_mix2)
def _test_input_fn_from_parse_example_helper(self, fc_impl, fn_to_run):
"""Tests complete flow with input_fn constructed from parse_example."""
label_dimension = 2
batch_size = 10
data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
data = data.reshape(batch_size, label_dimension)
serialized_examples = []
for datum in data:
example = example_pb2.Example(features=feature_pb2.Features(
feature={
'x': feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=datum)),
'y': feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=datum)),
}))
serialized_examples.append(example.SerializeToString())
feature_spec = {
'x': parsing_ops.FixedLenFeature([label_dimension], dtypes.float32),
'y': parsing_ops.FixedLenFeature([label_dimension], dtypes.float32),
}
def _train_input_fn():
feature_map = parsing_ops.parse_example(serialized_examples, feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _eval_input_fn():
feature_map = parsing_ops.parse_example(
input_lib.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _predict_input_fn():
feature_map = parsing_ops.parse_example(
input_lib.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
features.pop('y')
return features, None
fn_to_run(
train_input_fn=_train_input_fn,
eval_input_fn=_eval_input_fn,
predict_input_fn=_predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size,
fc_impl=fc_impl)
def test_input_fn_from_parse_example_basic(self, fc_impl):
self._test_input_fn_from_parse_example_helper(fc_impl,
self._test_complete_flow)
def test_input_fn_from_parse_example_mix1(self, fc_impl):
self._test_input_fn_from_parse_example_helper(fc_impl,
self._test_complete_flow_mix1)
def test_input_fn_from_parse_example_mix2(self, fc_impl):
self._test_input_fn_from_parse_example_helper(fc_impl,
self._test_complete_flow_mix2)
# A function that mimics the dnn-classifier constructor so the same tests can be reused.
def _dnn_classifier_fn(hidden_units,
feature_columns,
model_dir=None,
n_classes=2,
weight_column=None,
label_vocabulary=None,
optimizer='Adagrad',
config=None,
input_layer_partitioner=None):
return dnn_linear_combined.DNNLinearCombinedClassifierV2(
model_dir=model_dir,
dnn_hidden_units=hidden_units,
dnn_feature_columns=feature_columns,
dnn_optimizer=optimizer,
n_classes=n_classes,
weight_column=weight_column,
label_vocabulary=label_vocabulary,
input_layer_partitioner=input_layer_partitioner,
config=config)
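# A minimal, self-contained sketch of the adapter idea behind
# _dnn_classifier_fn: the shared DNN test suites expect a DNNClassifier-style
# constructor, so the adapter forwards their arguments to the combined
# estimator's dnn_* parameters. The names here (_FakeCombined,
# _dnn_only_adapter) are illustrative only and are not used by the tests.
class _FakeCombined(object):

  def __init__(self, dnn_hidden_units=None, dnn_feature_columns=None):
    self.dnn_hidden_units = dnn_hidden_units
    self.dnn_feature_columns = dnn_feature_columns


def _dnn_only_adapter(hidden_units, feature_columns):
  # Map DNN-style arguments onto the combined constructor's dnn_* kwargs.
  return _FakeCombined(
      dnn_hidden_units=hidden_units, dnn_feature_columns=feature_columns)


assert _dnn_only_adapter([2, 2], []).dnn_hidden_units == [2, 2]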
class DNNOnlyClassifierEvaluateTest(
dnn_testing_utils.BaseDNNClassifierEvaluateTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierEvaluateTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column)
class DNNOnlyClassifierEvaluateV2Test(
dnn_testing_utils.BaseDNNClassifierEvaluateTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierEvaluateTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
class DNNOnlyClassifierPredictTest(
dnn_testing_utils.BaseDNNClassifierPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierPredictTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column)
class DNNOnlyClassifierPredictV2Test(
dnn_testing_utils.BaseDNNClassifierPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierPredictTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
class DNNOnlyClassifierTrainTest(
dnn_testing_utils.BaseDNNClassifierTrainTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierTrainTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column)
class DNNOnlyClassifierTrainV2Test(dnn_testing_utils.BaseDNNClassifierTrainTest,
test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierTrainTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
# A function that mimics the dnn-regressor constructor so the same tests can be reused.
def _dnn_regressor_fn(hidden_units,
feature_columns,
model_dir=None,
label_dimension=1,
weight_column=None,
optimizer='Adagrad',
config=None,
input_layer_partitioner=None):
return dnn_linear_combined.DNNLinearCombinedRegressorV2(
model_dir=model_dir,
dnn_hidden_units=hidden_units,
dnn_feature_columns=feature_columns,
dnn_optimizer=optimizer,
label_dimension=label_dimension,
weight_column=weight_column,
input_layer_partitioner=input_layer_partitioner,
config=config)
class DNNOnlyRegressorEvaluateTest(
dnn_testing_utils.BaseDNNRegressorEvaluateTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorEvaluateTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column)
class DNNOnlyRegressorEvaluateV2Test(
dnn_testing_utils.BaseDNNRegressorEvaluateTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorEvaluateTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
class DNNOnlyRegressorPredictTest(
dnn_testing_utils.BaseDNNRegressorPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorPredictTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column)
class DNNOnlyRegressorPredictV2Test(
dnn_testing_utils.BaseDNNRegressorPredictTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorPredictTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
class DNNOnlyRegressorTrainTest(
dnn_testing_utils.BaseDNNRegressorTrainTest, test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorTrainTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column)
class DNNOnlyRegressorTrainV2Test(dnn_testing_utils.BaseDNNRegressorTrainTest,
test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorTrainTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
@parameterized.parameters((feature_column,), (feature_column_v2,))
class DNNLinearCombinedClassifierIntegrationTest(test.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
writer_cache.FileWriterCache.clear()
shutil.rmtree(self._model_dir)
def _as_label(self, data_in_float):
return np.rint(data_in_float).astype(np.int64)
def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
input_dimension, n_classes, batch_size, fc_impl):
linear_feature_columns = [
fc_impl.numeric_column('x', shape=(input_dimension,))
]
dnn_feature_columns = [
fc_impl.numeric_column('x', shape=(input_dimension,))
]
feature_columns = linear_feature_columns + dnn_feature_columns
est = dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=linear_feature_columns,
dnn_hidden_units=(2, 2),
dnn_feature_columns=dnn_feature_columns,
n_classes=n_classes,
model_dir=self._model_dir)
# TRAIN
num_steps = 10
est.train(train_input_fn, steps=num_steps)
# EVALUATE
scores = est.evaluate(eval_input_fn)
self.assertEqual(num_steps, scores[ops.GraphKeys.GLOBAL_STEP])
self.assertIn('loss', six.iterkeys(scores))
# PREDICT
predicted_proba = np.array([
x[prediction_keys.PredictionKeys.PROBABILITIES]
for x in est.predict(predict_input_fn)
])
self.assertAllEqual((batch_size, n_classes), predicted_proba.shape)
# EXPORT
feature_spec = feature_column.make_parse_example_spec(feature_columns)
serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
feature_spec)
export_dir = est.export_savedmodel(tempfile.mkdtemp(),
serving_input_receiver_fn)
self.assertTrue(gfile.Exists(export_dir))
def test_numpy_input_fn(self, fc_impl):
"""Tests complete flow with numpy_input_fn."""
n_classes = 3
input_dimension = 2
batch_size = 10
data = np.linspace(
0., n_classes - 1., batch_size * input_dimension, dtype=np.float32)
x_data = data.reshape(batch_size, input_dimension)
y_data = self._as_label(np.reshape(data[:batch_size], (batch_size, 1)))
# learn y = x
train_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
y=y_data,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
y=y_data,
batch_size=batch_size,
shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
batch_size=batch_size,
shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size,
fc_impl=fc_impl)
def test_pandas_input_fn(self, fc_impl):
"""Tests complete flow with pandas_input_fn."""
if not HAS_PANDAS:
return
input_dimension = 1
n_classes = 2
batch_size = 10
data = np.linspace(0., n_classes - 1., batch_size, dtype=np.float32)
x = pd.DataFrame({'x': data})
y = pd.Series(self._as_label(data))
train_input_fn = pandas_io.pandas_input_fn(
x=x,
y=y,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = pandas_io.pandas_input_fn(
x=x,
y=y,
batch_size=batch_size,
shuffle=False)
predict_input_fn = pandas_io.pandas_input_fn(
x=x,
batch_size=batch_size,
shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size,
fc_impl=fc_impl)
def test_input_fn_from_parse_example(self, fc_impl):
"""Tests complete flow with input_fn constructed from parse_example."""
input_dimension = 2
n_classes = 3
batch_size = 10
data = np.linspace(0., n_classes-1., batch_size * input_dimension,
dtype=np.float32)
data = data.reshape(batch_size, input_dimension)
serialized_examples = []
for datum in data:
example = example_pb2.Example(features=feature_pb2.Features(
feature={
'x':
feature_pb2.Feature(float_list=feature_pb2.FloatList(
value=datum)),
'y':
feature_pb2.Feature(int64_list=feature_pb2.Int64List(
value=self._as_label(datum[:1]))),
}))
serialized_examples.append(example.SerializeToString())
feature_spec = {
'x': parsing_ops.FixedLenFeature([input_dimension], dtypes.float32),
'y': parsing_ops.FixedLenFeature([1], dtypes.int64),
}
def _train_input_fn():
feature_map = parsing_ops.parse_example(serialized_examples, feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _eval_input_fn():
feature_map = parsing_ops.parse_example(
input_lib.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _predict_input_fn():
feature_map = parsing_ops.parse_example(
input_lib.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = linear_testing_utils.queue_parsed_features(feature_map)
features.pop('y')
return features, None
self._test_complete_flow(
train_input_fn=_train_input_fn,
eval_input_fn=_eval_input_fn,
predict_input_fn=_predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size,
fc_impl=fc_impl)
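# A quick, self-contained illustration of the _as_label rounding used above:
# np.rint rounds to the nearest integer (ties go to the even integer) before
# the int64 cast, so float features in [0, n_classes - 1] map to valid class
# ids. The sample values below are illustrative only.
_demo_floats = np.array([0.0, 0.49, 0.5, 1.51], dtype=np.float32)
assert np.rint(_demo_floats).astype(np.int64).tolist() == [0, 0, 0, 2]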
@parameterized.parameters((feature_column,), (feature_column_v2,))
class DNNLinearCombinedTests(test.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
shutil.rmtree(self._model_dir)
def _mock_optimizer(self, real_optimizer, var_name_prefix):
"""Verifies global_step is None and var_names start with given prefix."""
def _minimize(loss, global_step=None, var_list=None):
self.assertIsNone(global_step)
trainable_vars = var_list or ops.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES)
var_names = [var.name for var in trainable_vars]
self.assertTrue(
all([name.startswith(var_name_prefix) for name in var_names]))
# var is used to check that this op is called during training.
with ops.name_scope(''):
var = variables_lib.Variable(0., name=(var_name_prefix + '_called'))
with ops.control_dependencies([var.assign(100.)]):
return real_optimizer.minimize(loss, global_step, var_list)
optimizer_mock = test.mock.NonCallableMagicMock(
spec=optimizer_lib.Optimizer, wraps=real_optimizer)
optimizer_mock.minimize = test.mock.MagicMock(wraps=_minimize)
return optimizer_mock
def test_train_op_calls_both_dnn_and_linear(self, fc_impl):
opt = gradient_descent.GradientDescentOptimizer(1.)
x_column = fc_impl.numeric_column('x')
input_fn = numpy_io.numpy_input_fn(
x={'x': np.array([[0.], [1.]])},
y=np.array([[0.], [1.]]),
batch_size=1,
shuffle=False)
est = dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=[x_column],
# verifies linear_optimizer is used only for the linear part.
linear_optimizer=self._mock_optimizer(opt, 'linear'),
dnn_hidden_units=(2, 2),
dnn_feature_columns=[x_column],
# verifies dnn_optimizer is used only for the dnn part.
dnn_optimizer=self._mock_optimizer(opt, 'dnn'),
model_dir=self._model_dir)
est.train(input_fn, steps=1)
# verifies train_op fires linear minimize op
self.assertEqual(100.,
checkpoint_utils.load_variable(
self._model_dir, 'linear_called'))
# verifies train_op fires dnn minimize op
self.assertEqual(100.,
checkpoint_utils.load_variable(
self._model_dir, 'dnn_called'))
def test_dnn_and_linear_logits_are_added(self, fc_impl):
with ops.Graph().as_default():
variables_lib.Variable([[1.0]], name='linear/linear_model/x/weights')
variables_lib.Variable([2.0], name='linear/linear_model/bias_weights')
variables_lib.Variable([[3.0]], name='dnn/hiddenlayer_0/kernel')
variables_lib.Variable([4.0], name='dnn/hiddenlayer_0/bias')
variables_lib.Variable([[5.0]], name='dnn/logits/kernel')
variables_lib.Variable([6.0], name='dnn/logits/bias')
variables_lib.Variable(1, name='global_step', dtype=dtypes.int64)
linear_testing_utils.save_variables_to_ckpt(self._model_dir)
x_column = fc_impl.numeric_column('x')
est = dnn_linear_combined.DNNLinearCombinedRegressorV2(
linear_feature_columns=[x_column],
dnn_hidden_units=[1],
dnn_feature_columns=[x_column],
model_dir=self._model_dir)
input_fn = numpy_io.numpy_input_fn(
x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
# linear logits = 10*1 + 2 = 12
# dnn logits = (10*3 + 4)*5 + 6 = 176
# logits = dnn + linear = 176 + 12 = 188
self.assertAllClose(
{
prediction_keys.PredictionKeys.PREDICTIONS: [188.],
},
next(est.predict(input_fn=input_fn)))
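# The _mock_optimizer helper above relies on mock wrapping: the wrapped
# callable still executes, while its call arguments remain observable. A
# minimal, self-contained sketch of that pattern with the stdlib (names are
# illustrative only):
import unittest.mock as _mock_demo


def _demo_minimize(loss):
  return loss


_wrapped_demo = _mock_demo.MagicMock(wraps=_demo_minimize)
assert _wrapped_demo(3.0) == 3.0  # the real function still runs
_wrapped_demo.assert_called_once_with(3.0)  # ...and the call was recorded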
@parameterized.parameters((feature_column,), (feature_column_v2,))
class DNNLinearCombinedWarmStartingTest(test.TestCase):
def setUp(self):
# Create a directory to save our old checkpoint and vocabularies to.
self._ckpt_and_vocab_dir = tempfile.mkdtemp()
# Make a dummy input_fn.
def _input_fn():
features = {
'age': [[23.], [31.]],
'city': [['Palo Alto'], ['Mountain View']],
}
return features, [0, 1]
self._input_fn = _input_fn
def tearDown(self):
# Clean up checkpoint / vocab dir.
writer_cache.FileWriterCache.clear()
shutil.rmtree(self._ckpt_and_vocab_dir)
def test_classifier_basic_warm_starting(self, fc_impl):
"""Tests correctness of DNNLinearCombinedClassifier default warm-start."""
age = fc_impl.numeric_column('age')
city = fc_impl.embedding_column(
fc_impl.categorical_column_with_vocabulary_list(
'city', vocabulary_list=['Mountain View', 'Palo Alto']),
dimension=5)
# Create a DNNLinearCombinedClassifier and train to save a checkpoint.
dnn_lc_classifier = dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
model_dir=self._ckpt_and_vocab_dir,
n_classes=4,
linear_optimizer='SGD',
dnn_optimizer='SGD')
dnn_lc_classifier.train(input_fn=self._input_fn, max_steps=1)
# Create a second DNNLinearCombinedClassifier, warm-started from the first.
# Use a learning_rate = 0.0 optimizer to check values (use SGD so we don't
# have accumulator values that change).
warm_started_dnn_lc_classifier = (
dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
n_classes=4,
linear_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
dnn_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
warm_start_from=dnn_lc_classifier.model_dir))
warm_started_dnn_lc_classifier.train(input_fn=self._input_fn, max_steps=1)
for variable_name in warm_started_dnn_lc_classifier.get_variable_names():
self.assertAllClose(
dnn_lc_classifier.get_variable_value(variable_name),
warm_started_dnn_lc_classifier.get_variable_value(variable_name))
def test_regressor_basic_warm_starting(self, fc_impl):
"""Tests correctness of DNNLinearCombinedRegressor default warm-start."""
age = fc_impl.numeric_column('age')
city = fc_impl.embedding_column(
fc_impl.categorical_column_with_vocabulary_list(
'city', vocabulary_list=['Mountain View', 'Palo Alto']),
dimension=5)
# Create a DNNLinearCombinedRegressor and train to save a checkpoint.
dnn_lc_regressor = dnn_linear_combined.DNNLinearCombinedRegressorV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
model_dir=self._ckpt_and_vocab_dir,
linear_optimizer='SGD',
dnn_optimizer='SGD')
dnn_lc_regressor.train(input_fn=self._input_fn, max_steps=1)
# Create a second DNNLinearCombinedRegressor, warm-started from the first.
# Use a learning_rate = 0.0 optimizer to check values (use SGD so we don't
# have accumulator values that change).
warm_started_dnn_lc_regressor = (
dnn_linear_combined.DNNLinearCombinedRegressorV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
linear_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
dnn_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
warm_start_from=dnn_lc_regressor.model_dir))
warm_started_dnn_lc_regressor.train(input_fn=self._input_fn, max_steps=1)
for variable_name in warm_started_dnn_lc_regressor.get_variable_names():
self.assertAllClose(
dnn_lc_regressor.get_variable_value(variable_name),
warm_started_dnn_lc_regressor.get_variable_value(variable_name))
def test_warm_starting_selective_variables(self, fc_impl):
"""Tests selecting variables to warm-start."""
age = fc_impl.numeric_column('age')
city = fc_impl.embedding_column(
fc_impl.categorical_column_with_vocabulary_list(
'city', vocabulary_list=['Mountain View', 'Palo Alto']),
dimension=5)
# Create a DNNLinearCombinedClassifier and train to save a checkpoint.
dnn_lc_classifier = dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
model_dir=self._ckpt_and_vocab_dir,
n_classes=4,
linear_optimizer='SGD',
dnn_optimizer='SGD')
dnn_lc_classifier.train(input_fn=self._input_fn, max_steps=1)
# Create a second DNNLinearCombinedClassifier, warm-started from the first.
# Use a learning_rate = 0.0 optimizer to check values (use SGD so we don't
# have accumulator values that change).
warm_started_dnn_lc_classifier = (
dnn_linear_combined.DNNLinearCombinedClassifierV2(
linear_feature_columns=[age],
dnn_feature_columns=[city],
dnn_hidden_units=[256, 128],
n_classes=4,
linear_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
dnn_optimizer=gradient_descent.GradientDescentOptimizer(
learning_rate=0.0),
# The provided regular expression will only warm-start the deep
# portion of the model.
warm_start_from=estimator.WarmStartSettings(
ckpt_to_initialize_from=dnn_lc_classifier.model_dir,
vars_to_warm_start='.*(dnn).*')))
warm_started_dnn_lc_classifier.train(input_fn=self._input_fn, max_steps=1)
for variable_name in warm_started_dnn_lc_classifier.get_variable_names():
if 'dnn' in variable_name:
self.assertAllClose(
dnn_lc_classifier.get_variable_value(variable_name),
warm_started_dnn_lc_classifier.get_variable_value(variable_name))
elif 'linear' in variable_name:
linear_values = warm_started_dnn_lc_classifier.get_variable_value(
variable_name)
# Since they're not warm-started, the linear weights will be
# zero-initialized.
self.assertAllClose(np.zeros_like(linear_values), linear_values)
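# A small, self-contained illustration of how a vars_to_warm_start pattern
# such as '.*(dnn).*' partitions variable names (the names below are made up
# for the demo):
import re as _re_demo

_demo_var_names = ['dnn/hiddenlayer_0/kernel', 'linear/linear_model/age/weights']
_demo_warm = [n for n in _demo_var_names if _re_demo.match('.*(dnn).*', n)]
assert _demo_warm == ['dnn/hiddenlayer_0/kernel']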
if __name__ == '__main__':
test.main()
| 39.620477
| 145
| 0.714146
| 5,252
| 44,890
| 5.662795
| 0.085491
| 0.035069
| 0.037524
| 0.019804
| 0.821358
| 0.789045
| 0.775495
| 0.749336
| 0.71931
| 0.701288
| 0
| 0.008536
| 0.204077
| 44,890
| 1,132
| 146
| 39.655477
| 0.823869
| 0.090956
| 0
| 0.710857
| 0
| 0
| 0.015814
| 0.002632
| 0
| 0
| 0
| 0
| 0.019429
| 1
| 0.090286
| false
| 0
| 0.038857
| 0.006857
| 0.188571
| 0.001143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54dea6ab501fb2f977276292c51ce7409aee2903
| 196
|
py
|
Python
|
TrackingTools/GsfTracking/python/GsfElectronFit_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
TrackingTools/GsfTracking/python/GsfElectronFit_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
TrackingTools/GsfTracking/python/GsfElectronFit_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
import RecoTracker.TrackProducer.GsfTrackProducer_cfi
GsfGlobalElectronTest = RecoTracker.TrackProducer.GsfTrackProducer_cfi.gsfTrackProducer.clone()
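# clone() copies the full gsfTrackProducer ParameterSet; individual
# parameters could be overridden via keyword arguments to clone(), the usual
# CMSSW config idiom (the exact parameter names live in
# RecoTracker.TrackProducer.GsfTrackProducer_cfi and are not repeated here).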
| 21.777778
| 95
| 0.872449
| 18
| 196
| 9.388889
| 0.666667
| 0.284024
| 0.473373
| 0.508876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 196
| 8
| 96
| 24.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
54e6cd295fd2b6ff9fb668971b3dd91859bd35b2
| 27,378
|
py
|
Python
|
servertest.py
|
streettraffic/streettraffic
|
26cbda67b803eb42f3fe70354689648bafd6d718
|
[
"MIT"
] | 23
|
2017-08-09T19:57:23.000Z
|
2021-11-23T12:30:33.000Z
|
servertest.py
|
streettraffic/streettraffic
|
26cbda67b803eb42f3fe70354689648bafd6d718
|
[
"MIT"
] | 1
|
2018-10-04T18:17:11.000Z
|
2018-10-23T00:12:19.000Z
|
servertest.py
|
streettraffic/streettraffic
|
26cbda67b803eb42f3fe70354689648bafd6d718
|
[
"MIT"
] | 4
|
2017-10-12T03:31:34.000Z
|
2021-05-19T15:11:30.000Z
|
## import system modules
import json
import rethinkdb as r
import time
import datetime as dt
import asyncio
import pstats, cProfile
## import custom modules
from streettraffic.map_resource.utility import Utility
from streettraffic.database import TrafficData
from streettraffic import tools
from streettraffic.server import TrafficServer
class TestTrafficServer(TrafficServer):
async def main_crawler(self):
"""
"""
self.crawler_running = True
while self.crawler_running:
print('start crawling')
self.traffic_data.store_matrix_json(self.traffic_matrix_list)
#self.traffic_data.insert_analytics_traffic_pattern('[33.880079, 33.648894, -84.485086, -84.311365]')
# Time management: we want to execute the script every 30 minutes,
# so we calculate how many seconds to sleep until the next half-hour mark.
current = dt.datetime.utcnow()
if current.minute < 30:
wait_seconds = 30*60 - current.minute*60 - current.second
else:
wait_seconds = 60*60 - current.minute*60 - current.second
print('crawling finished')
await asyncio.sleep(wait_seconds)
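# A self-contained check of the sleep arithmetic above: from any (minute,
# second) within the hour, wait_seconds lands exactly on the next half-hour
# boundary. The helper name and sample values are illustrative only.
def _demo_wait_seconds(minute, second):
    if minute < 30:
        return 30 * 60 - minute * 60 - second
    return 60 * 60 - minute * 60 - second

assert _demo_wait_seconds(12, 20) == 1060  # 12:20 past the hour -> :30
assert _demo_wait_seconds(45, 0) == 900    # :45 -> top of the next hour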
settings = {
'app_id': 'F8aPRXcW3MmyUvQ8Z3J9',
'app_code' : 'IVp1_zoGHdLdz0GvD_Eqsw',
'map_tile_base_url': 'https://1.traffic.maps.cit.api.here.com/maptile/2.1/traffictile/newest/normal.day/',
'json_tile_base_url': 'https://traffic.cit.api.here.com/traffic/6.2/flow.json?'
}
## initialize traffic server
traffic_server = TestTrafficServer(database_name="Traffic", database_ip="localhost")
# start
traffic_server.start()
#message = ['getMultipleDaysRouteTraffic',
#           <sample Google-Directions-style route response (geocoded_waypoints
#           plus routes/legs/steps with encoded polylines and lat/lng paths)
#           for a 1.8 mi drive from 209 Edgewood Ave NE to
#           20 Ponce De Leon Ave NE, Atlanta, via I-75 N/I-85 N;
#           full payload elided for readability>,
#           [['2017-07-02T18:00:00.000Z', '2017-07-03T00:00:00.000Z'],
#            ['2017-07-03T18:00:00.000Z', '2017-07-04T00:00:00.000Z'],
#            ['2017-07-04T18:00:00.000Z', '2017-07-05T00:00:00.000Z'],
#            ['2017-07-05T18:00:00.000Z', '2017-07-06T00:00:00.000Z']]]
#traffic_server.traffic_data.get_historic_traffic_multiple_days(message[1], message[2])
# cProfile.run("traffic_server.traffic_data.get_historic_traffic_multiple_days(message[1], message[2])", "Profile.prof")
# s = pstats.Stats("Profile.prof")
# s.strip_dirs().sort_stats("time").print_stats()
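# (The three commented lines above are the standard cProfile/pstats recipe:
# profile a statement into "Profile.prof", load the stats, strip directory
# prefixes, sort by internal time, and print the report.)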
| 464.033898
| 25,408
| 0.637592
| 3,700
| 27,378
| 4.677838
| 0.179459
| 0.129709
| 0.012018
| 0.017795
| 0.852843
| 0.833199
| 0.798937
| 0.743182
| 0.664895
| 0.66085
| 0
| 0.425297
| 0.091789
| 27,378
| 59
| 25,409
| 464.033898
| 0.270742
| 0.949668
| 0
| 0
| 0
| 0.064516
| 0.204006
| 0.01632
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.322581
| 0
| 0.354839
| 0.064516
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
0725f876fda05016b2e574e676160affd4ebff46
| 5,959
|
py
|
Python
|
tests/test_compression.py
|
Nicusor97/fastavro
|
0eb08b7b7fd762002712ddaa00c81915a03daa7a
|
[
"MIT"
] | null | null | null |
tests/test_compression.py
|
Nicusor97/fastavro
|
0eb08b7b7fd762002712ddaa00c81915a03daa7a
|
[
"MIT"
] | null | null | null |
tests/test_compression.py
|
Nicusor97/fastavro
|
0eb08b7b7fd762002712ddaa00c81915a03daa7a
|
[
"MIT"
] | null | null | null |
from io import BytesIO
import os
import pytest
import fastavro
@pytest.mark.parametrize("codec", ["null", "deflate", "bzip2"])
def test_builtin_codecs(codec):
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
fastavro.writer(file, schema, records, codec=codec)
file.seek(0)
out_records = list(fastavro.reader(file))
assert records == out_records
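# (Each test below repeats the same write -> seek(0) -> read round trip: the
# BytesIO cursor sits at end-of-file after writing, so it must be rewound
# before fastavro.reader can parse the container header.)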
@pytest.mark.parametrize("codec", ["snappy", "zstandard", "lz4", "xz"])
@pytest.mark.skipif(os.name == "nt", reason="A pain to set up on Windows")
def test_optional_codecs(codec):
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
fastavro.writer(file, schema, records, codec=codec)
file.seek(0)
out_records = list(fastavro.reader(file))
assert records == out_records
@pytest.mark.parametrize("codec", ["snappy", "zstandard", "lz4"])
@pytest.mark.skipif(os.name != "nt", reason="codec is present")
def test_optional_codecs_not_installed(codec):
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
with pytest.raises(
ValueError,
match="{} codec is supported but you need to install".format(codec)
):
fastavro.writer(file, schema, records, codec=codec)
@pytest.mark.skipif(os.name != "nt", reason="codec is present")
def test_xz_works_by_default_on_windows():
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
fastavro.writer(file, schema, records, codec="xz")
file.seek(0)
out_records = list(fastavro.reader(file))
assert records == out_records
def test_unsupported_codec():
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
with pytest.raises(ValueError, match="unrecognized codec"):
fastavro.writer(file, schema, records, codec="unsupported")
file = BytesIO()
fastavro.writer(file, schema, records, codec="deflate")
# Change the avro binary to act as if it were written with a codec called
# `unsupported`
modified_avro = file.getvalue().replace(b"\x0edeflate", b"\x16unsupported")
modified_file = BytesIO(modified_avro)
with pytest.raises(ValueError, match="Unrecognized codec"):
list(fastavro.reader(modified_file))
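# Why the byte swap above works: Avro writes the codec name as a
# zigzag-varint length followed by the raw bytes, and for small non-negative
# n the zigzag encoding of n is just 2*n in a single byte. A self-contained
# check (the helper name is illustrative):
def _zigzag_len_byte(name):
    # Single-byte zigzag varint of len(name); valid while len(name) <= 63.
    return bytes([len(name) << 1])

assert _zigzag_len_byte("deflate") == b"\x0e"      # 7  -> 0x0e
assert _zigzag_len_byte("unsupported") == b"\x16"  # 11 -> 0x16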
def test_compression_level():
"""https://github.com/fastavro/fastavro/issues/377"""
schema = {
"doc": "A weather reading.",
"name": "Weather",
"namespace": "test",
"type": "record",
"fields": [
{"name": "station", "type": "string"},
{"name": "time", "type": "long"},
{"name": "temp", "type": "int"},
],
}
records = [
{"station": "011990-99999", "temp": 0, "time": 1433269388},
{"station": "011990-99999", "temp": 22, "time": 1433270389},
{"station": "011990-99999", "temp": -11, "time": 1433273379},
{"station": "012650-99999", "temp": 111, "time": 1433275478},
]
file = BytesIO()
fastavro.writer(
file, schema, records, codec="deflate", codec_compression_level=9
)
file.seek(0)
out_records = list(fastavro.reader(file))
assert records == out_records
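The codec tests above all repeat one write/seek/read round trip; a minimal standalone sketch of that pattern, assuming only the always-available "null" codec (no optional compression packages installed):

```python
from io import BytesIO

import fastavro

# Schema and record mirror the weather examples used in the tests above.
schema = {
    "name": "Weather",
    "namespace": "test",
    "type": "record",
    "fields": [
        {"name": "station", "type": "string"},
        {"name": "time", "type": "long"},
        {"name": "temp", "type": "int"},
    ],
}
records = [{"station": "011990-99999", "temp": 0, "time": 1433269388}]

buffer = BytesIO()
fastavro.writer(buffer, schema, records, codec="null")  # uncompressed blocks
buffer.seek(0)  # rewind before handing the buffer to the reader
assert list(fastavro.reader(buffer)) == records
```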
| 30.875648
| 79
| 0.540359
| 607
| 5,959
| 5.252059
| 0.181219
| 0.067754
| 0.101631
| 0.124216
| 0.830615
| 0.830615
| 0.830615
| 0.775094
| 0.775094
| 0.75596
| 0
| 0.128112
| 0.2586
| 5,959
| 192
| 80
| 31.036458
| 0.593481
| 0.022487
| 0
| 0.735484
| 0
| 0
| 0.273977
| 0
| 0
| 0
| 0
| 0
| 0.025806
| 1
| 0.03871
| false
| 0
| 0.025806
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 075694fec102c6716b357b538fd0c40397198cd1
| 6,812
| py
| Python
| tests/test_pose_graphs.py
| AvanDavad/jaxfg
| 6d1559126ba872b452eca6a13c2688349f1c5f7e
| ["MIT"] | 120
| 2020-11-28T19:43:31.000Z
| 2022-03-29T02:35:46.000Z
| tests/test_pose_graphs.py
| AvanDavad/jaxfg
| 6d1559126ba872b452eca6a13c2688349f1c5f7e
| ["MIT"] | 12
| 2021-05-24T09:02:12.000Z
| 2022-03-30T19:51:40.000Z
| tests/test_pose_graphs.py
| AvanDavad/jaxfg
| 6d1559126ba872b452eca6a13c2688349f1c5f7e
| ["MIT"] | 9
| 2021-05-06T15:31:23.000Z
| 2022-03-23T12:06:44.000Z
|
"""Simple pose graph tests. Could use cleanup/refactoring."""
from typing import List
import jaxlie
from jax import numpy as jnp
import jaxfg
def test_pose_graph_gauss_newton():
pose_variables = [
jaxfg.geometry.SE2Variable(),
jaxfg.geometry.SE2Variable(),
]
factors: List[jaxfg.core.FactorBase] = [
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[0],
mu=jaxlie.SE2.from_xy_theta(0.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=[1.0, 1.0, 1.0]
),
),
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[1],
mu=jaxlie.SE2.from_xy_theta(2.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
jaxfg.geometry.BetweenFactor.make(
variable_T_world_a=pose_variables[0],
variable_T_world_b=pose_variables[1],
T_a_b=jaxlie.SE2.from_xy_theta(1.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
]
graph = jaxfg.core.StackedFactorGraph.make(factors)
initial_assignments = jaxfg.core.VariableAssignments.make_from_defaults(
pose_variables
)
solution_assignments = graph.solve(
initial_assignments,
solver=jaxfg.solvers.GaussNewtonSolver(
linear_solver=jaxfg.sparse.CholmodSolver()
),
)
assert graph.compute_joint_nll(initial_assignments) > graph.compute_joint_nll(
solution_assignments
)
assert type(repr(solution_assignments)) == str
assert isinstance(solution_assignments.get_value(pose_variables[0]), jaxlie.SE2)
assert isinstance(
solution_assignments.get_stacked_value(jaxfg.geometry.SE2Variable), jaxlie.SE2
)
assert jnp.all(
solution_assignments.get_value(pose_variables[0]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[0]
)
assert jnp.all(
solution_assignments.get_value(pose_variables[1]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[1]
)
def test_pose_graph_levenberg_marquardt():
pose_variables = [
jaxfg.geometry.SE2Variable(),
jaxfg.geometry.SE2Variable(),
]
factors: List[jaxfg.core.FactorBase] = [
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[0],
mu=jaxlie.SE2.from_xy_theta(0.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[1],
mu=jaxlie.SE2.from_xy_theta(2.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
jaxfg.geometry.BetweenFactor.make(
variable_T_world_a=pose_variables[0],
variable_T_world_b=pose_variables[1],
T_a_b=jaxlie.SE2.from_xy_theta(1.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
]
graph = jaxfg.core.StackedFactorGraph.make(factors)
initial_assignments = jaxfg.core.VariableAssignments.make_from_defaults(
pose_variables
)
solution_assignments = graph.solve(
initial_assignments,
solver=jaxfg.solvers.LevenbergMarquardtSolver(
linear_solver=jaxfg.sparse.ConjugateGradientSolver()
),
)
assert graph.compute_joint_nll(initial_assignments) > graph.compute_joint_nll(
solution_assignments
)
assert type(repr(solution_assignments)) == str
assert isinstance(solution_assignments.get_value(pose_variables[0]), jaxlie.SE2)
assert isinstance(
solution_assignments.get_stacked_value(jaxfg.geometry.SE2Variable), jaxlie.SE2
)
assert jnp.all(
solution_assignments.get_value(pose_variables[0]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[0]
)
assert jnp.all(
solution_assignments.get_value(pose_variables[1]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[1]
)
def test_pose_graph_dogleg():
pose_variables = [
jaxfg.geometry.SE2Variable(),
jaxfg.geometry.SE2Variable(),
]
factors: List[jaxfg.core.FactorBase] = [
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[0],
mu=jaxlie.SE2.from_xy_theta(0.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
jaxfg.geometry.PriorFactor.make(
variable=pose_variables[1],
mu=jaxlie.SE2.from_xy_theta(2.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
jaxfg.geometry.BetweenFactor.make(
variable_T_world_a=pose_variables[0],
variable_T_world_b=pose_variables[1],
T_a_b=jaxlie.SE2.from_xy_theta(1.0, 0.0, 0.0),
noise_model=jaxfg.noises.DiagonalGaussian.make_from_covariance(
diagonal=jnp.ones(3)
),
),
]
graph = jaxfg.core.StackedFactorGraph.make(factors)
initial_assignments = jaxfg.core.VariableAssignments.make_from_defaults(
pose_variables
)
solution_assignments = graph.solve(
initial_assignments,
solver=jaxfg.solvers.DoglegSolver(),
)
assert graph.compute_joint_nll(initial_assignments) > graph.compute_joint_nll(
solution_assignments
)
assert type(repr(solution_assignments)) == str
assert isinstance(solution_assignments.get_value(pose_variables[0]), jaxlie.SE2)
assert isinstance(
solution_assignments.get_stacked_value(jaxfg.geometry.SE2Variable), jaxlie.SE2
)
assert jnp.all(
solution_assignments.get_value(pose_variables[0]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[0]
)
assert jnp.all(
solution_assignments.get_value(pose_variables[1]).parameters()
== solution_assignments.get_stacked_value(
jaxfg.geometry.SE2Variable
).parameters()[1]
)
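The three solver tests above differ only in the solver object passed to graph.solve(); a hedged sketch of collapsing them with pytest parametrization, where make_graph is a hypothetical helper wrapping the shared variable/factor setup shown above:

```python
import pytest

import jaxfg

@pytest.mark.parametrize(
    "solver",
    [
        jaxfg.solvers.GaussNewtonSolver(linear_solver=jaxfg.sparse.CholmodSolver()),
        jaxfg.solvers.LevenbergMarquardtSolver(
            linear_solver=jaxfg.sparse.ConjugateGradientSolver()
        ),
        jaxfg.solvers.DoglegSolver(),
    ],
)
def test_pose_graph_solvers(solver):
    # make_graph() is assumed to return the StackedFactorGraph and default
    # assignments built exactly as in the tests above.
    graph, initial_assignments = make_graph()
    solution_assignments = graph.solve(initial_assignments, solver=solver)
    assert graph.compute_joint_nll(initial_assignments) > graph.compute_joint_nll(
        solution_assignments
    )
```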
| 33.722772
| 86
| 0.645038
| 745
| 6,812
| 5.642953
| 0.111409
| 0.018554
| 0.021408
| 0.019981
| 0.927688
| 0.927688
| 0.927688
| 0.927688
| 0.927688
| 0.927688
| 0
| 0.024563
| 0.252936
| 6,812
| 201
| 87
| 33.890547
| 0.801533
| 0.008074
| 0
| 0.793296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100559
| 1
| 0.01676
| false
| 0
| 0.022346
| 0
| 0.039106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 4acafca48ff8afa5872f65f4a66b0bf1b1d6d117
| 160
| py
| Python
| goodman_focus/__init__.py
| soar-telescope/goodman_focus_finder
| 4d933b8bd0aa45a3eb8b15799c71bbb8c5a8740c
| ["BSD-3-Clause"] | null | null | null
| goodman_focus/__init__.py
| soar-telescope/goodman_focus_finder
| 4d933b8bd0aa45a3eb8b15799c71bbb8c5a8740c
| ["BSD-3-Clause"] | null | null | null
| goodman_focus/__init__.py
| soar-telescope/goodman_focus_finder
| 4d933b8bd0aa45a3eb8b15799c71bbb8c5a8740c
| ["BSD-3-Clause"] | 1
| 2019-06-06T20:47:49.000Z
| 2019-06-06T20:47:49.000Z
|
from .version import __version__ # noqa: F401
from .goodman_focus import GoodmanFocus # noqa: F401
from .goodman_focus import run_goodman_focus # noqa: F401
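The three re-exports above define the package's public import surface; a minimal sketch showing that each name resolves from the package root:

```python
import goodman_focus

# All three names re-exported in __init__.py resolve at the top level.
print(goodman_focus.__version__)
assert hasattr(goodman_focus, "GoodmanFocus")
assert hasattr(goodman_focus, "run_goodman_focus")
```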
| 40
| 58
| 0.79375
| 22
| 160
| 5.409091
| 0.409091
| 0.201681
| 0.201681
| 0.319328
| 0.504202
| 0.504202
| 0
| 0
| 0
| 0
| 0
| 0.066176
| 0.15
| 160
| 3
| 59
| 53.333333
| 0.808824
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| 4accbdecf027711c76323e4534c0ba77fa454f86
| 20,230
| py
| Python
| sdk/python/pulumi_akamai/app_sec_rate_policy_action.py
| pulumi/pulumi-akamai
| 85f933ccf2f61738b3074a13fa718132280f8364
| ["ECL-2.0", "Apache-2.0"] | 3
| 2021-01-21T15:22:12.000Z
| 2021-08-25T14:15:29.000Z
| sdk/python/pulumi_akamai/app_sec_rate_policy_action.py
| pulumi/pulumi-akamai
| 85f933ccf2f61738b3074a13fa718132280f8364
| ["ECL-2.0", "Apache-2.0"] | 59
| 2020-08-13T14:39:36.000Z
| 2022-03-31T15:19:48.000Z
| sdk/python/pulumi_akamai/app_sec_rate_policy_action.py
| pulumi/pulumi-akamai
| 85f933ccf2f61738b3074a13fa718132280f8364
| ["ECL-2.0", "Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AppSecRatePolicyActionArgs', 'AppSecRatePolicyAction']
@pulumi.input_type
class AppSecRatePolicyActionArgs:
def __init__(__self__, *,
config_id: pulumi.Input[int],
ipv4_action: pulumi.Input[str],
ipv6_action: pulumi.Input[str],
rate_policy_id: pulumi.Input[int],
security_policy_id: pulumi.Input[str]):
"""
The set of arguments for constructing an AppSecRatePolicyAction resource.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the rate policy action being modified.
:param pulumi.Input[str] ipv4_action: . Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
:param pulumi.Input[str] ipv6_action: . Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
:param pulumi.Input[int] rate_policy_id: . Unique identifier of the rate policy whose action is being modified.
"""
pulumi.set(__self__, "config_id", config_id)
pulumi.set(__self__, "ipv4_action", ipv4_action)
pulumi.set(__self__, "ipv6_action", ipv6_action)
pulumi.set(__self__, "rate_policy_id", rate_policy_id)
pulumi.set(__self__, "security_policy_id", security_policy_id)
@property
@pulumi.getter(name="configId")
def config_id(self) -> pulumi.Input[int]:
"""
. Unique identifier of the security configuration associated with the rate policy action being modified.
"""
return pulumi.get(self, "config_id")
@config_id.setter
def config_id(self, value: pulumi.Input[int]):
pulumi.set(self, "config_id", value)
@property
@pulumi.getter(name="ipv4Action")
def ipv4_action(self) -> pulumi.Input[str]:
"""
. Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
"""
return pulumi.get(self, "ipv4_action")
@ipv4_action.setter
def ipv4_action(self, value: pulumi.Input[str]):
pulumi.set(self, "ipv4_action", value)
@property
@pulumi.getter(name="ipv6Action")
def ipv6_action(self) -> pulumi.Input[str]:
"""
. Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
"""
return pulumi.get(self, "ipv6_action")
@ipv6_action.setter
def ipv6_action(self, value: pulumi.Input[str]):
pulumi.set(self, "ipv6_action", value)
@property
@pulumi.getter(name="ratePolicyId")
def rate_policy_id(self) -> pulumi.Input[int]:
"""
. Unique identifier of the rate policy whose action is being modified.
"""
return pulumi.get(self, "rate_policy_id")
@rate_policy_id.setter
def rate_policy_id(self, value: pulumi.Input[int]):
pulumi.set(self, "rate_policy_id", value)
@property
@pulumi.getter(name="securityPolicyId")
def security_policy_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "security_policy_id")
@security_policy_id.setter
def security_policy_id(self, value: pulumi.Input[str]):
pulumi.set(self, "security_policy_id", value)
@pulumi.input_type
class _AppSecRatePolicyActionState:
def __init__(__self__, *,
config_id: Optional[pulumi.Input[int]] = None,
ipv4_action: Optional[pulumi.Input[str]] = None,
ipv6_action: Optional[pulumi.Input[str]] = None,
rate_policy_id: Optional[pulumi.Input[int]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering AppSecRatePolicyAction resources.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the rate policy action being modified.
:param pulumi.Input[str] ipv4_action: . Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
:param pulumi.Input[str] ipv6_action: . Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
:param pulumi.Input[int] rate_policy_id: . Unique identifier of the rate policy whose action is being modified.
"""
if config_id is not None:
pulumi.set(__self__, "config_id", config_id)
if ipv4_action is not None:
pulumi.set(__self__, "ipv4_action", ipv4_action)
if ipv6_action is not None:
pulumi.set(__self__, "ipv6_action", ipv6_action)
if rate_policy_id is not None:
pulumi.set(__self__, "rate_policy_id", rate_policy_id)
if security_policy_id is not None:
pulumi.set(__self__, "security_policy_id", security_policy_id)
@property
@pulumi.getter(name="configId")
def config_id(self) -> Optional[pulumi.Input[int]]:
"""
. Unique identifier of the security configuration associated with the rate policy action being modified.
"""
return pulumi.get(self, "config_id")
@config_id.setter
def config_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "config_id", value)
@property
@pulumi.getter(name="ipv4Action")
def ipv4_action(self) -> Optional[pulumi.Input[str]]:
"""
. Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
"""
return pulumi.get(self, "ipv4_action")
@ipv4_action.setter
def ipv4_action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ipv4_action", value)
@property
@pulumi.getter(name="ipv6Action")
def ipv6_action(self) -> Optional[pulumi.Input[str]]:
"""
. Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
"""
return pulumi.get(self, "ipv6_action")
@ipv6_action.setter
def ipv6_action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ipv6_action", value)
@property
@pulumi.getter(name="ratePolicyId")
def rate_policy_id(self) -> Optional[pulumi.Input[int]]:
"""
. Unique identifier of the rate policy whose action is being modified.
"""
return pulumi.get(self, "rate_policy_id")
@rate_policy_id.setter
def rate_policy_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "rate_policy_id", value)
@property
@pulumi.getter(name="securityPolicyId")
def security_policy_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "security_policy_id")
@security_policy_id.setter
def security_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "security_policy_id", value)
class AppSecRatePolicyAction(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
ipv4_action: Optional[pulumi.Input[str]] = None,
ipv6_action: Optional[pulumi.Input[str]] = None,
rate_policy_id: Optional[pulumi.Input[int]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
**Scopes**: Rate policy
Creates, modifies or deletes the actions associated with a rate policy.
By default, rate policies take no action when triggered.
Note that you must set separate actions for requests originating from an IPv4 IP address and for requests originating from an IPv6 address.
**Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/security-policies/{policyId}/rate-policies/{ratePolicyId}](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putactionsperratepolicy)
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
configuration = akamai.get_app_sec_configuration(name="Documentation")
appsec_rate_policy = akamai.AppSecRatePolicy("appsecRatePolicy",
config_id=configuration.config_id,
rate_policy=(lambda path: open(path).read())(f"{path['module']}/rate_policy.json"))
appsec_rate_policy_action = akamai.AppSecRatePolicyAction("appsecRatePolicyAction",
config_id=configuration.config_id,
security_policy_id="gms1_134637",
rate_policy_id=appsec_rate_policy.rate_policy_id,
ipv4_action="deny",
ipv6_action="deny")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the rate policy action being modified.
:param pulumi.Input[str] ipv4_action: . Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
:param pulumi.Input[str] ipv6_action: . Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
:param pulumi.Input[int] rate_policy_id: . Unique identifier of the rate policy whose action is being modified.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AppSecRatePolicyActionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
**Scopes**: Rate policy
Creates, modifies or deletes the actions associated with a rate policy.
By default, rate policies take no action when triggered.
Note that you must set separate actions for requests originating from an IPv4 IP address and for requests originating from an IPv6 address.
**Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/security-policies/{policyId}/rate-policies/{ratePolicyId}](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putactionsperratepolicy)
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
configuration = akamai.get_app_sec_configuration(name="Documentation")
appsec_rate_policy = akamai.AppSecRatePolicy("appsecRatePolicy",
config_id=configuration.config_id,
rate_policy=(lambda path: open(path).read())(f"{path['module']}/rate_policy.json"))
appsec_rate_policy_action = akamai.AppSecRatePolicyAction("appsecRatePolicyAction",
config_id=configuration.config_id,
security_policy_id="gms1_134637",
rate_policy_id=appsec_rate_policy.rate_policy_id,
ipv4_action="deny",
ipv6_action="deny")
```
:param str resource_name: The name of the resource.
:param AppSecRatePolicyActionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AppSecRatePolicyActionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
ipv4_action: Optional[pulumi.Input[str]] = None,
ipv6_action: Optional[pulumi.Input[str]] = None,
rate_policy_id: Optional[pulumi.Input[int]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AppSecRatePolicyActionArgs.__new__(AppSecRatePolicyActionArgs)
if config_id is None and not opts.urn:
raise TypeError("Missing required property 'config_id'")
__props__.__dict__["config_id"] = config_id
if ipv4_action is None and not opts.urn:
raise TypeError("Missing required property 'ipv4_action'")
__props__.__dict__["ipv4_action"] = ipv4_action
if ipv6_action is None and not opts.urn:
raise TypeError("Missing required property 'ipv6_action'")
__props__.__dict__["ipv6_action"] = ipv6_action
if rate_policy_id is None and not opts.urn:
raise TypeError("Missing required property 'rate_policy_id'")
__props__.__dict__["rate_policy_id"] = rate_policy_id
if security_policy_id is None and not opts.urn:
raise TypeError("Missing required property 'security_policy_id'")
__props__.__dict__["security_policy_id"] = security_policy_id
super(AppSecRatePolicyAction, __self__).__init__(
'akamai:index/appSecRatePolicyAction:AppSecRatePolicyAction',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
ipv4_action: Optional[pulumi.Input[str]] = None,
ipv6_action: Optional[pulumi.Input[str]] = None,
rate_policy_id: Optional[pulumi.Input[int]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None) -> 'AppSecRatePolicyAction':
"""
Get an existing AppSecRatePolicyAction resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the rate policy action being modified.
:param pulumi.Input[str] ipv4_action: . Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
:param pulumi.Input[str] ipv6_action: . Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
:param pulumi.Input[int] rate_policy_id: . Unique identifier of the rate policy whose action is being modified.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AppSecRatePolicyActionState.__new__(_AppSecRatePolicyActionState)
__props__.__dict__["config_id"] = config_id
__props__.__dict__["ipv4_action"] = ipv4_action
__props__.__dict__["ipv6_action"] = ipv6_action
__props__.__dict__["rate_policy_id"] = rate_policy_id
__props__.__dict__["security_policy_id"] = security_policy_id
return AppSecRatePolicyAction(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="configId")
def config_id(self) -> pulumi.Output[int]:
"""
. Unique identifier of the security configuration associated with the rate policy action being modified.
"""
return pulumi.get(self, "config_id")
@property
@pulumi.getter(name="ipv4Action")
def ipv4_action(self) -> pulumi.Output[str]:
"""
. Rate policy action for requests coming from an IPv4 IP address. Allowed actions are:
- **alert**. Record the event,
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
- **none**. Take no action.
"""
return pulumi.get(self, "ipv4_action")
@property
@pulumi.getter(name="ipv6Action")
def ipv6_action(self) -> pulumi.Output[str]:
"""
. Rate policy action for requests coming from an IPv6 IP address. Allowed actions are:
- **alert**. Record the event.
- **deny**. Block the request.
- **deny_custom{custom_deny_id}**. Take the action specified by the custom deny.
"""
return pulumi.get(self, "ipv6_action")
@property
@pulumi.getter(name="ratePolicyId")
def rate_policy_id(self) -> pulumi.Output[int]:
"""
. Unique identifier of the rate policy whose action is being modified.
"""
return pulumi.get(self, "rate_policy_id")
@property
@pulumi.getter(name="securityPolicyId")
def security_policy_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "security_policy_id")
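Beyond creating the resource as in the docstring examples, the static get() defined above can adopt an existing resource by provider ID; a minimal sketch, where "12345" is a placeholder ID, not a real value:

```python
import pulumi
import pulumi_akamai as akamai

# Look up an existing rate policy action by provider ID; only arguments
# defined on AppSecRatePolicyAction.get above are used here.
existing = akamai.AppSecRatePolicyAction.get(
    "importedRatePolicyAction",
    id="12345",  # placeholder provider ID
)
pulumi.export("ipv4Action", existing.ipv4_action)
```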
| 47.156177
| 250
| 0.649679
| 2,396
| 20,230
| 5.24374
| 0.085977
| 0.062082
| 0.041229
| 0.031519
| 0.848615
| 0.83206
| 0.82617
| 0.803247
| 0.783429
| 0.772923
| 0
| 0.007323
| 0.250766
| 20,230
| 428
| 251
| 47.266355
| 0.821601
| 0.434701
| 0
| 0.584541
| 1
| 0
| 0.120742
| 0.012637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.154589
| false
| 0.004831
| 0.024155
| 0.014493
| 0.270531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| ab04daeb3315d86923fbac88235ead8d6157a357
| 45,522
| py
| Python
| dags/kd03-dags/KD_FACTOR_DEAP_AND_CHECK.py
| ywf5566/airflow
| e7872dddbf275729b2c42e2a4ff602a6df7d1536
| ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | null | null | null
| dags/kd03-dags/KD_FACTOR_DEAP_AND_CHECK.py
| ywf5566/airflow
| e7872dddbf275729b2c42e2a4ff602a6df7d1536
| ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | null | null | null
| dags/kd03-dags/KD_FACTOR_DEAP_AND_CHECK.py
| ywf5566/airflow
| e7872dddbf275729b2c42e2a4ff602a6df7d1536
| ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.operators.ssh_operator import SSHOperator
default_args = {'owner': 'afroot03', 'retries': 2, 'retry_delay': timedelta(minutes=1)}
dag = DAG('KD-FACTOR-DEAP-AND-CHECK',
default_args=default_args,
schedule_interval=None,
catchup=False,
start_date=datetime(2020, 12, 17, 17, 0))
# ============================================== tasks ==================================================
l2_factor_check = BashOperator(task_id="l2_factor_check", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-repo-dep-check.sh l2_factor_check ", dag=dag, pool="factor")
check_qsdata = BashOperator(task_id="check_qsdata", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-repo-dep-check.sh check_qsdata ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_8 = BashOperator(task_id="fac_daily_kd_deap_factor_8", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531307 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_2 = BashOperator(task_id="fac_daily_kd_deap_factor_2", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531301 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_3 = BashOperator(task_id="fac_daily_kd_deap_factor_3", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531302 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_1 = BashOperator(task_id="fac_daily_kd_deap_factor_1", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531300 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_6 = BashOperator(task_id="fac_daily_kd_deap_factor_6", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531305 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_7 = BashOperator(task_id="fac_daily_kd_deap_factor_7", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531306 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_4 = BashOperator(task_id="fac_daily_kd_deap_factor_4", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531303 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5 = BashOperator(task_id="fac_daily_kd_deap_factor_5", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531304 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_18 = BashOperator(task_id="fac_daily_kd_deap_factor_18", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531317 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_19 = BashOperator(task_id="fac_daily_kd_deap_factor_19", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531318 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_23 = BashOperator(task_id="fac_daily_kd_deap_factor_23", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531322 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_24 = BashOperator(task_id="fac_daily_kd_deap_factor_24", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531323 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_25 = BashOperator(task_id="fac_daily_kd_deap_factor_25", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531324 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_11 = BashOperator(task_id="fac_daily_kd_deap_factor_11", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531310 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_12 = BashOperator(task_id="fac_daily_kd_deap_factor_12", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531311 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_13 = BashOperator(task_id="fac_daily_kd_deap_factor_13", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531312 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_14 = BashOperator(task_id="fac_daily_kd_deap_factor_14", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531313 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_20 = BashOperator(task_id="fac_daily_kd_deap_factor_20", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531319 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_16 = BashOperator(task_id="fac_daily_kd_deap_factor_16", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531315 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_17 = BashOperator(task_id="fac_daily_kd_deap_factor_17", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531316 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_35 = BashOperator(task_id="fac_daily_kd_deap_factor_35", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531334 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_29 = BashOperator(task_id="fac_daily_kd_deap_factor_29", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531328 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_30 = BashOperator(task_id="fac_daily_kd_deap_factor_30", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531329 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_27 = BashOperator(task_id="fac_daily_kd_deap_factor_27", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531326 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_33 = BashOperator(task_id="fac_daily_kd_deap_factor_33", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531332 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_34 = BashOperator(task_id="fac_daily_kd_deap_factor_34", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531333 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_31 = BashOperator(task_id="fac_daily_kd_deap_factor_31", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531330 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_32 = BashOperator(task_id="fac_daily_kd_deap_factor_32", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531331 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_43 = BashOperator(task_id="fac_daily_kd_deap_factor_43", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531342 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_44 = BashOperator(task_id="fac_daily_kd_deap_factor_44", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531343 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_47 = BashOperator(task_id="fac_daily_kd_deap_factor_47", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531346 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_48 = BashOperator(task_id="fac_daily_kd_deap_factor_48", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531347 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_51 = BashOperator(task_id="fac_daily_kd_deap_factor_51", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531350 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_36 = BashOperator(task_id="fac_daily_kd_deap_factor_36", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531335 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_37 = BashOperator(task_id="fac_daily_kd_deap_factor_37", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531336 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_38 = BashOperator(task_id="fac_daily_kd_deap_factor_38", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531337 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_39 = BashOperator(task_id="fac_daily_kd_deap_factor_39", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531338 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_45 = BashOperator(task_id="fac_daily_kd_deap_factor_45", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531344 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_40 = BashOperator(task_id="fac_daily_kd_deap_factor_40", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531339 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_41 = BashOperator(task_id="fac_daily_kd_deap_factor_41", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531340 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_64 = BashOperator(task_id="fac_daily_kd_deap_factor_64", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531363 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_55 = BashOperator(task_id="fac_daily_kd_deap_factor_55", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531354 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_56 = BashOperator(task_id="fac_daily_kd_deap_factor_56", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531355 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_52 = BashOperator(task_id="fac_daily_kd_deap_factor_52", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531351 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_60 = BashOperator(task_id="fac_daily_kd_deap_factor_60", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531359 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_62 = BashOperator(task_id="fac_daily_kd_deap_factor_62", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531361 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_58 = BashOperator(task_id="fac_daily_kd_deap_factor_58", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531357 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_59 = BashOperator(task_id="fac_daily_kd_deap_factor_59", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531358 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_72 = BashOperator(task_id="fac_daily_kd_deap_factor_72", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531371 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_73 = BashOperator(task_id="fac_daily_kd_deap_factor_73", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531372 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_75 = BashOperator(task_id="fac_daily_kd_deap_factor_75", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531374 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_76 = BashOperator(task_id="fac_daily_kd_deap_factor_76", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531375 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_77 = BashOperator(task_id="fac_daily_kd_deap_factor_77", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531376 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_65 = BashOperator(task_id="fac_daily_kd_deap_factor_65", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531364 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_66 = BashOperator(task_id="fac_daily_kd_deap_factor_66", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531365 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_67 = BashOperator(task_id="fac_daily_kd_deap_factor_67", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531366 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_69 = BashOperator(task_id="fac_daily_kd_deap_factor_69", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531368 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_74 = BashOperator(task_id="fac_daily_kd_deap_factor_74", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531373 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_70 = BashOperator(task_id="fac_daily_kd_deap_factor_70", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531369 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_71 = BashOperator(task_id="fac_daily_kd_deap_factor_71", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531370 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_87 = BashOperator(task_id="fac_daily_kd_deap_factor_87", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531386 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_80 = BashOperator(task_id="fac_daily_kd_deap_factor_80", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531379 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_81 = BashOperator(task_id="fac_daily_kd_deap_factor_81", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531380 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_78 = BashOperator(task_id="fac_daily_kd_deap_factor_78", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531377 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_85 = BashOperator(task_id="fac_daily_kd_deap_factor_85", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531384 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_86 = BashOperator(task_id="fac_daily_kd_deap_factor_86", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531385 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_83 = BashOperator(task_id="fac_daily_kd_deap_factor_83", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531382 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_84 = BashOperator(task_id="fac_daily_kd_deap_factor_84", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531383 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_88 = BashOperator(task_id="fac_daily_kd_deap_factor_88", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531387 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_89 = BashOperator(task_id="fac_daily_kd_deap_factor_89", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531388 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_91 = BashOperator(task_id="fac_daily_kd_deap_factor_91", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531390 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_92 = BashOperator(task_id="fac_daily_kd_deap_factor_92", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531391 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_93 = BashOperator(task_id="fac_daily_kd_deap_factor_93", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531392 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_94 = BashOperator(task_id="fac_daily_kd_deap_factor_94", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 3531393 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948155 = BashOperator(task_id="fac_daily_kd_deap_factor_5948155", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948155 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948156 = BashOperator(task_id="fac_daily_kd_deap_factor_5948156", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948156 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948157 = BashOperator(task_id="fac_daily_kd_deap_factor_5948157", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948157 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948158 = BashOperator(task_id="fac_daily_kd_deap_factor_5948158", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948158 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948159 = BashOperator(task_id="fac_daily_kd_deap_factor_5948159", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948159 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948160 = BashOperator(task_id="fac_daily_kd_deap_factor_5948160", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948160 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948161 = BashOperator(task_id="fac_daily_kd_deap_factor_5948161", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948161 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948162 = BashOperator(task_id="fac_daily_kd_deap_factor_5948162", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948162 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948163 = BashOperator(task_id="fac_daily_kd_deap_factor_5948163", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948163 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948164 = BashOperator(task_id="fac_daily_kd_deap_factor_5948164", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948164 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948165 = BashOperator(task_id="fac_daily_kd_deap_factor_5948165", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948165 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948166 = BashOperator(task_id="fac_daily_kd_deap_factor_5948166", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948166 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948167 = BashOperator(task_id="fac_daily_kd_deap_factor_5948167", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948167 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948168 = BashOperator(task_id="fac_daily_kd_deap_factor_5948168", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948168 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948169 = BashOperator(task_id="fac_daily_kd_deap_factor_5948169", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948169 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948170 = BashOperator(task_id="fac_daily_kd_deap_factor_5948170", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948170 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948171 = BashOperator(task_id="fac_daily_kd_deap_factor_5948171", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948171 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948172 = BashOperator(task_id="fac_daily_kd_deap_factor_5948172", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948172 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948173 = BashOperator(task_id="fac_daily_kd_deap_factor_5948173", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948173 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948174 = BashOperator(task_id="fac_daily_kd_deap_factor_5948174", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948174 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948175 = BashOperator(task_id="fac_daily_kd_deap_factor_5948175", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948175 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948176 = BashOperator(task_id="fac_daily_kd_deap_factor_5948176", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948176 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948177 = BashOperator(task_id="fac_daily_kd_deap_factor_5948177", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948177 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948178 = BashOperator(task_id="fac_daily_kd_deap_factor_5948178", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948178 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948179 = BashOperator(task_id="fac_daily_kd_deap_factor_5948179", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948179 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948180 = BashOperator(task_id="fac_daily_kd_deap_factor_5948180", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948180 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948181 = BashOperator(task_id="fac_daily_kd_deap_factor_5948181", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948181 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948182 = BashOperator(task_id="fac_daily_kd_deap_factor_5948182", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948182 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948183 = BashOperator(task_id="fac_daily_kd_deap_factor_5948183", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948183 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948184 = BashOperator(task_id="fac_daily_kd_deap_factor_5948184", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948184 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948185 = BashOperator(task_id="fac_daily_kd_deap_factor_5948185", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948185 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948186 = BashOperator(task_id="fac_daily_kd_deap_factor_5948186", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948186 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948187 = BashOperator(task_id="fac_daily_kd_deap_factor_5948187", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948187 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948188 = BashOperator(task_id="fac_daily_kd_deap_factor_5948188", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948188 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948189 = BashOperator(task_id="fac_daily_kd_deap_factor_5948189", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948189 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948190 = BashOperator(task_id="fac_daily_kd_deap_factor_5948190", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948190 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948191 = BashOperator(task_id="fac_daily_kd_deap_factor_5948191", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948191 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948192 = BashOperator(task_id="fac_daily_kd_deap_factor_5948192", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948192 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948193 = BashOperator(task_id="fac_daily_kd_deap_factor_5948193", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948193 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948194 = BashOperator(task_id="fac_daily_kd_deap_factor_5948194", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948194 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948195 = BashOperator(task_id="fac_daily_kd_deap_factor_5948195", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948195 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948196 = BashOperator(task_id="fac_daily_kd_deap_factor_5948196", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948196 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948197 = BashOperator(task_id="fac_daily_kd_deap_factor_5948197", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948197 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948198 = BashOperator(task_id="fac_daily_kd_deap_factor_5948198", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948198 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948199 = BashOperator(task_id="fac_daily_kd_deap_factor_5948199", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948199 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948200 = BashOperator(task_id="fac_daily_kd_deap_factor_5948200", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948200 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948201 = BashOperator(task_id="fac_daily_kd_deap_factor_5948201", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948201 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948202 = BashOperator(task_id="fac_daily_kd_deap_factor_5948202", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948202 ", dag=dag, pool="factor")
fac_daily_kd_deap_factor_5948203 = BashOperator(task_id="fac_daily_kd_deap_factor_5948203", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh 5948203 ", dag=dag, pool="factor")
# Tasks 5948204-5948250 all run the same factor-exec.sh script and differ only
# in the factor id, so they are generated in a loop. Each task is registered
# under its original variable name via globals() so the dependency wiring
# below can keep referring to the tasks by name. The trailing space in
# bash_command is preserved from the original task definitions.
for _factor_id in range(5948204, 5948251):
    _name = "fac_daily_kd_deap_factor_{}".format(_factor_id)
    globals()[_name] = BashOperator(
        task_id=_name,
        bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec.sh {} ".format(_factor_id),
        dag=dag,
        pool="factor",
    )
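# Gate task: factors-check.sh validates the output of the factor tasks above.
# The trailing space in bash_command prevents Airflow's Jinja templating from
# treating the ".sh"-suffixed string as a template file path to load.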
check_all_factor = BashOperator(task_id="check_all_factor", bash_command="sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factors-check.sh ", dag=dag, pool="factor")
# Trigger the kd06 task that runs on kd01.
trigger_kd01_kd06_alphanet_0_0_1_task = SSHOperator(
    task_id="trigger_kd01_kd06_alphanet_0_0_1_task",
    ssh_conn_id="kd01_keydriver",
    command="source /home/keydriver/airflow/bin/activate;airflow trigger_dag kd06_alphanet_0_0_1_task ",
    dag=dag,
)
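# Dependency wiring. check_qsdata fans out to the first-generation factor
# tasks; each factor then feeds its downstream factor(s), and every terminal
# factor fans in to check_all_factor before the remote trigger fires.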
check_qsdata >> [
    fac_daily_kd_deap_factor_8, fac_daily_kd_deap_factor_2, fac_daily_kd_deap_factor_3,
    fac_daily_kd_deap_factor_1, fac_daily_kd_deap_factor_6, fac_daily_kd_deap_factor_7,
    fac_daily_kd_deap_factor_4, fac_daily_kd_deap_factor_5, fac_daily_kd_deap_factor_18,
    fac_daily_kd_deap_factor_19, fac_daily_kd_deap_factor_23, fac_daily_kd_deap_factor_24,
    fac_daily_kd_deap_factor_25, fac_daily_kd_deap_factor_11, fac_daily_kd_deap_factor_12,
    fac_daily_kd_deap_factor_13, fac_daily_kd_deap_factor_14, fac_daily_kd_deap_factor_20,
    fac_daily_kd_deap_factor_16, fac_daily_kd_deap_factor_17,
]
fac_daily_kd_deap_factor_52 >> [fac_daily_kd_deap_factor_78]
l2_factor_check >> [check_qsdata]
fac_daily_kd_deap_factor_18 >> [fac_daily_kd_deap_factor_43]
fac_daily_kd_deap_factor_19 >> [fac_daily_kd_deap_factor_44]
fac_daily_kd_deap_factor_30 >> [fac_daily_kd_deap_factor_56]
fac_daily_kd_deap_factor_31 >> [fac_daily_kd_deap_factor_58]
fac_daily_kd_deap_factor_36 >> [fac_daily_kd_deap_factor_65]
fac_daily_kd_deap_factor_51 >> [fac_daily_kd_deap_factor_77]
fac_daily_kd_deap_factor_34 >> [fac_daily_kd_deap_factor_62]
fac_daily_kd_deap_factor_35 >> [fac_daily_kd_deap_factor_64]
fac_daily_kd_deap_factor_11 >> [fac_daily_kd_deap_factor_36]
fac_daily_kd_deap_factor_12 >> [fac_daily_kd_deap_factor_37]
fac_daily_kd_deap_factor_13 >> [fac_daily_kd_deap_factor_38]
fac_daily_kd_deap_factor_58 >> [fac_daily_kd_deap_factor_83]
fac_daily_kd_deap_factor_38 >> [fac_daily_kd_deap_factor_67]
fac_daily_kd_deap_factor_16 >> [fac_daily_kd_deap_factor_40]
fac_daily_kd_deap_factor_17 >> [fac_daily_kd_deap_factor_41]
fac_daily_kd_deap_factor_69 >> [fac_daily_kd_deap_factor_92]
fac_daily_kd_deap_factor_81 >> [check_all_factor]
fac_daily_kd_deap_factor_39 >> [fac_daily_kd_deap_factor_69]
fac_daily_kd_deap_factor_87 >> [check_all_factor]
fac_daily_kd_deap_factor_86 >> [check_all_factor]
fac_daily_kd_deap_factor_85 >> [check_all_factor]
fac_daily_kd_deap_factor_84 >> [check_all_factor]
fac_daily_kd_deap_factor_14 >> [fac_daily_kd_deap_factor_39]
fac_daily_kd_deap_factor_89 >> [check_all_factor]
fac_daily_kd_deap_factor_62 >> [fac_daily_kd_deap_factor_86]
fac_daily_kd_deap_factor_65 >> [fac_daily_kd_deap_factor_88]
fac_daily_kd_deap_factor_64 >> [fac_daily_kd_deap_factor_87]
fac_daily_kd_deap_factor_67 >> [fac_daily_kd_deap_factor_91]
fac_daily_kd_deap_factor_59 >> [fac_daily_kd_deap_factor_84]
fac_daily_kd_deap_factor_55 >> [fac_daily_kd_deap_factor_80]
fac_daily_kd_deap_factor_83 >> [check_all_factor]
fac_daily_kd_deap_factor_8 >> [fac_daily_kd_deap_factor_35]
fac_daily_kd_deap_factor_88 >> [check_all_factor]
fac_daily_kd_deap_factor_2 >> [fac_daily_kd_deap_factor_29]
fac_daily_kd_deap_factor_3 >> [fac_daily_kd_deap_factor_30]
fac_daily_kd_deap_factor_1 >> [fac_daily_kd_deap_factor_27]
fac_daily_kd_deap_factor_6 >> [fac_daily_kd_deap_factor_33]
fac_daily_kd_deap_factor_7 >> [fac_daily_kd_deap_factor_34]
fac_daily_kd_deap_factor_4 >> [fac_daily_kd_deap_factor_31]
fac_daily_kd_deap_factor_5 >> [fac_daily_kd_deap_factor_32]
fac_daily_kd_deap_factor_47 >> [fac_daily_kd_deap_factor_75]
fac_daily_kd_deap_factor_45 >> [fac_daily_kd_deap_factor_74]
fac_daily_kd_deap_factor_44 >> [fac_daily_kd_deap_factor_73]
fac_daily_kd_deap_factor_29 >> [fac_daily_kd_deap_factor_55]
fac_daily_kd_deap_factor_41 >> [fac_daily_kd_deap_factor_71]
fac_daily_kd_deap_factor_40 >> [fac_daily_kd_deap_factor_70]
fac_daily_kd_deap_factor_25 >> [fac_daily_kd_deap_factor_51]
fac_daily_kd_deap_factor_24 >> [fac_daily_kd_deap_factor_48]
fac_daily_kd_deap_factor_27 >> [fac_daily_kd_deap_factor_52]
fac_daily_kd_deap_factor_20 >> [fac_daily_kd_deap_factor_45]
fac_daily_kd_deap_factor_23 >> [fac_daily_kd_deap_factor_47]
fac_daily_kd_deap_factor_48 >> [fac_daily_kd_deap_factor_76]
fac_daily_kd_deap_factor_91 >> [check_all_factor]
fac_daily_kd_deap_factor_92 >> [check_all_factor]
fac_daily_kd_deap_factor_93 >> [check_all_factor]
fac_daily_kd_deap_factor_94 >> [check_all_factor]
fac_daily_kd_deap_factor_70 >> [fac_daily_kd_deap_factor_93]
fac_daily_kd_deap_factor_78 >> [check_all_factor]
fac_daily_kd_deap_factor_33 >> [fac_daily_kd_deap_factor_60]
fac_daily_kd_deap_factor_76 >> fac_daily_kd_deap_factor_5948156 >> [check_all_factor]
fac_daily_kd_deap_factor_77 >> fac_daily_kd_deap_factor_5948160 >> [check_all_factor]
fac_daily_kd_deap_factor_74 >> fac_daily_kd_deap_factor_5948158 >> [check_all_factor]
fac_daily_kd_deap_factor_75 >> fac_daily_kd_deap_factor_5948159 >> [check_all_factor]
fac_daily_kd_deap_factor_72 >> fac_daily_kd_deap_factor_5948155 >> [check_all_factor]
fac_daily_kd_deap_factor_73 >> fac_daily_kd_deap_factor_5948157 >> [check_all_factor]
fac_daily_kd_deap_factor_32 >> [fac_daily_kd_deap_factor_59]
fac_daily_kd_deap_factor_71 >> [fac_daily_kd_deap_factor_94]
fac_daily_kd_deap_factor_66 >> [fac_daily_kd_deap_factor_89]
fac_daily_kd_deap_factor_56 >> [fac_daily_kd_deap_factor_81]
fac_daily_kd_deap_factor_60 >> [fac_daily_kd_deap_factor_85]
fac_daily_kd_deap_factor_43 >> [fac_daily_kd_deap_factor_72]
fac_daily_kd_deap_factor_80 >> [check_all_factor]
check_qsdata >> fac_daily_kd_deap_factor_5948161 >> fac_daily_kd_deap_factor_5948162 >> fac_daily_kd_deap_factor_5948163 >> fac_daily_kd_deap_factor_5948164 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948165 >> fac_daily_kd_deap_factor_5948166 >> fac_daily_kd_deap_factor_5948167 >> fac_daily_kd_deap_factor_5948168 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948169 >> fac_daily_kd_deap_factor_5948170 >> fac_daily_kd_deap_factor_5948171 >> fac_daily_kd_deap_factor_5948172 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948173 >> fac_daily_kd_deap_factor_5948174 >> fac_daily_kd_deap_factor_5948175 >> fac_daily_kd_deap_factor_5948176 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948177 >> fac_daily_kd_deap_factor_5948178 >> fac_daily_kd_deap_factor_5948179 >> fac_daily_kd_deap_factor_5948180 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948181 >> fac_daily_kd_deap_factor_5948182 >> fac_daily_kd_deap_factor_5948183 >> fac_daily_kd_deap_factor_5948184 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948185 >> fac_daily_kd_deap_factor_5948186 >> fac_daily_kd_deap_factor_5948187 >> fac_daily_kd_deap_factor_5948188 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948189 >> fac_daily_kd_deap_factor_5948190 >> fac_daily_kd_deap_factor_5948191 >> fac_daily_kd_deap_factor_5948192 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948193 >> fac_daily_kd_deap_factor_5948194 >> fac_daily_kd_deap_factor_5948195 >> fac_daily_kd_deap_factor_5948196 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948197 >> fac_daily_kd_deap_factor_5948198 >> fac_daily_kd_deap_factor_5948199 >> fac_daily_kd_deap_factor_5948200 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948201 >> fac_daily_kd_deap_factor_5948202 >> fac_daily_kd_deap_factor_5948203 >> fac_daily_kd_deap_factor_5948204 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948205 >> fac_daily_kd_deap_factor_5948206 >> fac_daily_kd_deap_factor_5948207 >> fac_daily_kd_deap_factor_5948208 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948209 >> fac_daily_kd_deap_factor_5948210 >> fac_daily_kd_deap_factor_5948211 >> fac_daily_kd_deap_factor_5948212 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948213 >> fac_daily_kd_deap_factor_5948214 >> fac_daily_kd_deap_factor_5948215 >> fac_daily_kd_deap_factor_5948216 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948217 >> fac_daily_kd_deap_factor_5948218 >> fac_daily_kd_deap_factor_5948219 >> fac_daily_kd_deap_factor_5948220 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948221 >> fac_daily_kd_deap_factor_5948222 >> fac_daily_kd_deap_factor_5948223 >> fac_daily_kd_deap_factor_5948224 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948225 >> fac_daily_kd_deap_factor_5948226 >> fac_daily_kd_deap_factor_5948227 >> fac_daily_kd_deap_factor_5948228 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948229 >> fac_daily_kd_deap_factor_5948230 >> fac_daily_kd_deap_factor_5948231 >> fac_daily_kd_deap_factor_5948232 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948233 >> fac_daily_kd_deap_factor_5948234 >> fac_daily_kd_deap_factor_5948235 >> fac_daily_kd_deap_factor_5948236 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948237 >> fac_daily_kd_deap_factor_5948238 >> fac_daily_kd_deap_factor_5948239 >> fac_daily_kd_deap_factor_5948240 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948241 >> fac_daily_kd_deap_factor_5948242 >> fac_daily_kd_deap_factor_5948243 >> fac_daily_kd_deap_factor_5948244 >> fac_daily_kd_deap_factor_5948249 >> check_all_factor
check_qsdata >> fac_daily_kd_deap_factor_5948245 >> fac_daily_kd_deap_factor_5948246 >> fac_daily_kd_deap_factor_5948247 >> fac_daily_kd_deap_factor_5948248 >> fac_daily_kd_deap_factor_5948250 >> check_all_factor
fac_daily_kd_deap_factor_37 >> [fac_daily_kd_deap_factor_66]
check_all_factor >> trigger_kd01_kd06_alphanet_0_0_1_task
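# Optional sanity check (a sketch only, assuming Airflow 1.x, where DAG.tasks
# and BaseOperator.upstream_list are available): running this module directly
# prints the task count and the tasks feeding the remote trigger, which helps
# spot wiring mistakes in a generated file this large. The scheduler imports
# the module, so this block never runs in production.
if __name__ == "__main__":
    print("tasks in dag:", len(dag.tasks))
    print("upstream of trigger:",
          [t.task_id for t in trigger_kd01_kd06_alphanet_0_0_1_task.upstream_list])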
[Quality-signal metadata for the DAG file above: avg_line_length 154.836735, max_line_length 588, alphanum_fraction 0.83709, 45,522 chars over 294 lines.]
hexsha: db691970bb04c614f1aee6ef7a934d4edbc3cc62 | size: 109 | ext: py | lang: Python
repo path: codes/style/training/Generators/__init__.py | repo: liweileev/SOMGAN | head hexsha: 11ec1c01b288a00f5a49906b9e683e67d3509701 | licenses: ["MIT"]
stars, issues, and forks counts/dates: null
'''
Author: Liweileev
Date: 2022-01-04 23:12:35
LastEditors: Liweileev
LastEditTime: 2022-01-04 23:12:35
'''
[Quality-signal metadata for the __init__.py file above: avg_line_length 15.571429, max_line_length 33, alphanum_fraction 0.724771, 109 chars over 6 lines.]