hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
64279708b3855421a401e56d281735620a7325b1
| 5,172
|
py
|
Python
|
tests/acceptance/test_Hosts.py
|
AntonKuzminRussia/web-scout
|
5b8fed2c5917c9ecc210052703a65f1204f4b347
|
[
"MIT"
] | 6
|
2017-10-11T18:56:05.000Z
|
2019-09-29T21:45:05.000Z
|
tests/acceptance/test_Hosts.py
|
AntonKuzminRussia/web-scout
|
5b8fed2c5917c9ecc210052703a65f1204f4b347
|
[
"MIT"
] | 3
|
2021-03-31T19:17:30.000Z
|
2021-12-13T20:16:23.000Z
|
tests/acceptance/test_Hosts.py
|
AntonKuzminRussia/web-scout
|
5b8fed2c5917c9ecc210052703a65f1204f4b347
|
[
"MIT"
] | null | null | null |
import subprocess
import os
import time
import re
runPath = os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + '/../../')
class Test_Hosts(object):
dict_path = '/tmp/wstest.dict'
headers_file_path = '/tmp/wstest.headers_file'
conf_file_path = "/tmp/wstest.conf_file"
def get_results_count(self, output):
return len(re.findall('^(\t.+)', output, re.M))
def test_dict(self):
fh = open(self.dict_path, 'w')
fh.write("test\nfoobar\nadmin\ndev")
fh.close()
output = subprocess.check_output([
'./ws.py',
'HostsDict',
'--template',
'@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--dict',
self.dict_path,
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 1
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 1
def test_mask(self):
output = subprocess.check_output([
'./ws.py',
'HostsMask',
'--template',
'admi@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--mask',
'?l,1,1',
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 1
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 1
def test_combine(self):
fh = open(self.dict_path, 'w')
fh.write("\nadmi\nde")
fh.close()
output = subprocess.check_output([
'./ws.py',
'HostsCombine',
'--combine-template',
'%d%%m%',
'--template',
'admi@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--mask',
'?l,1,1',
'--dict',
self.dict_path,
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 1
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 1
def test_false_size(self):
fh = open(self.dict_path, 'w')
fh.write("test\nfoobar\nadmin\ndev")
fh.close()
output = subprocess.check_output([
'./ws.py',
'HostsDict',
'--template',
'@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-size',
'11321',
'--dict',
self.dict_path,
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 1
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 1
def test_msymbol(self):
fh = open(self.dict_path, 'w')
fh.write("test\nfoobar\nadmin\ndev")
fh.close()
output = subprocess.check_output([
'./ws.py',
'HostsDict',
'--template',
'%.wildcard-web.polygon.web-scout.ru',
'--msymbol',
'%',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--dict',
self.dict_path,
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 1
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 1
def test_ignore_words_re(self):
fh = open(self.dict_path, 'w')
fh.write("test\nfoobar\nadmin\ndev")
fh.close()
output = subprocess.check_output([
'./ws.py',
'HostsDict',
'--template',
'@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--dict',
self.dict_path,
'--ignore-words-re',
'admin'
])
print(output)
output = output.decode('utf8')
assert self.get_results_count(output) == 0
assert output.count("admin.wildcard-web.polygon.web-scout.ru") == 0
def test_delay(self):
fh = open(self.dict_path, 'w')
fh.write("test\nfoobar\nadmin\ndev")
fh.close()
stime = int(time.time())
output = subprocess.check_output([
'./ws.py',
'HostsDict',
'--template',
'@.wildcard-web.polygon.web-scout.ru',
'--ip',
'185.43.7.81',
'--false-re',
'Apache2 Ubuntu Default Page',
'--dict',
self.dict_path,
'--threads',
'1',
'--delay',
'2',
])
print(output)
output = output.decode('utf8')
etime = int(time.time())
assert etime - stime > 8
| 28.893855
| 82
| 0.483952
| 550
| 5,172
| 4.450909
| 0.158182
| 0.068627
| 0.095588
| 0.11152
| 0.809641
| 0.809641
| 0.783497
| 0.783497
| 0.767974
| 0.755719
| 0
| 0.027894
| 0.355375
| 5,172
| 178
| 83
| 29.05618
| 0.706359
| 0
| 0
| 0.757764
| 0
| 0
| 0.268561
| 0.126063
| 0
| 0
| 0
| 0
| 0.080745
| 1
| 0.049689
| false
| 0
| 0.024845
| 0.006211
| 0.10559
| 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff367dd3679a71c878c8050451aba5440a429917
| 265
|
py
|
Python
|
userena/contrib/umessages/tests/__init__.py
|
ixc/django-userena
|
5a8c61dcf5133e8b745c25d3b54e5578043222d8
|
[
"BSD-3-Clause"
] | 3
|
2015-11-30T05:00:09.000Z
|
2021-09-21T00:03:57.000Z
|
userena/contrib/umessages/tests/__init__.py
|
ixc/django-userena
|
5a8c61dcf5133e8b745c25d3b54e5578043222d8
|
[
"BSD-3-Clause"
] | null | null | null |
userena/contrib/umessages/tests/__init__.py
|
ixc/django-userena
|
5a8c61dcf5133e8b745c25d3b54e5578043222d8
|
[
"BSD-3-Clause"
] | 2
|
2016-09-18T01:20:06.000Z
|
2021-09-21T00:01:54.000Z
|
from userena.contrib.umessages.tests.fields import *
from userena.contrib.umessages.tests.forms import *
from userena.contrib.umessages.tests.managers import *
from userena.contrib.umessages.tests.models import *
from userena.contrib.umessages.tests.views import *
| 44.166667
| 54
| 0.830189
| 35
| 265
| 6.285714
| 0.314286
| 0.25
| 0.409091
| 0.613636
| 0.836364
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 265
| 5
| 55
| 53
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ff465eea7335fd57b27f7181a31c4ba1eeb5b360
| 3,743
|
py
|
Python
|
comparisons_among_different_model_formulations/on_MORPH2/DOEL_2groups_resnet18/generate_list.py
|
Xiejiu/second_age_estimation
|
89e9ef371a07aba0bbba496697176381e4e9432c
|
[
"MIT"
] | 1
|
2021-09-27T06:34:03.000Z
|
2021-09-27T06:34:03.000Z
|
comparisons_among_different_model_formulations/on_MORPH2/DOEL_2groups_resnet18/generate_list.py
|
Xiejiu/second_age_estimation
|
89e9ef371a07aba0bbba496697176381e4e9432c
|
[
"MIT"
] | 1
|
2021-07-29T01:14:11.000Z
|
2021-07-29T01:54:33.000Z
|
comparisons_among_different_model_formulations/on_MORPH2/DOEL_2groups_resnet18/generate_list.py
|
Xiejiu/second_age_estimation
|
89e9ef371a07aba0bbba496697176381e4e9432c
|
[
"MIT"
] | 1
|
2021-09-27T07:14:48.000Z
|
2021-09-27T07:14:48.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 4 18:37:47 2019
@author: xjc
"""
import os
orig_train_list='./S1_train_processed.txt'
orig_valid_list='./S1_validation_processed.txt'
orig_test_list='./S1_test_processed.txt'
modified_train_list='./S1_train_modified.txt'
modified_valid_list='./S1_valid_modified.txt'
modified_test_list='./S1_test_modified.txt'
if os.path.exists(modified_train_list):
os.remove(modified_train_list)
if os.path.exists(modified_valid_list):
os.remove(modified_valid_list)
if os.path.exists(modified_test_list):
os.remove(modified_test_list)
with open(orig_train_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_train_list, 'a') as ff:
ff.write(new_record)
with open(orig_valid_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_valid_list, 'a') as ff:
ff.write(new_record)
with open(orig_test_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_test_list, 'a') as ff:
ff.write(new_record)
#################################
#################################
import os
orig_train_list='./S2_train_processed.txt'
orig_valid_list='./S2_validation_processed.txt'
orig_test_list='./S2_test_fixed_processed.txt'
modified_train_list='./S2_train_modified.txt'
modified_valid_list='./S2_valid_modified.txt'
modified_test_list='./S2_test_fixed_modified.txt'
if os.path.exists(modified_train_list):
os.remove(modified_train_list)
if os.path.exists(modified_valid_list):
os.remove(modified_valid_list)
if os.path.exists(modified_test_list):
os.remove(modified_test_list)
with open(orig_train_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_train_list, 'a') as ff:
ff.write(new_record)
with open(orig_valid_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_valid_list, 'a') as ff:
ff.write(new_record)
with open(orig_test_list) as f:
records=f.readlines()
for record in records:
img_name=record.split()[0]
img_age=record.split()[1]
new_age=''
for i in range(1,101):
if i<=int(img_age):
new_age+=' 1'
else:
new_age+=' 0'
new_record=img_name+new_age+' '+img_age+'\n'
with open(modified_test_list, 'a') as ff:
ff.write(new_record)
| 25.993056
| 49
| 0.618488
| 570
| 3,743
| 3.773684
| 0.107018
| 0.066946
| 0.063226
| 0.039052
| 0.958624
| 0.901906
| 0.781962
| 0.781962
| 0.781962
| 0.781962
| 0
| 0.025303
| 0.229228
| 3,743
| 144
| 50
| 25.993056
| 0.720277
| 0.024846
| 0
| 0.890909
| 0
| 0
| 0.097315
| 0.083893
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018182
| 0
| 0.018182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff50b89a69fe1972aedcc06d4dd59825b8d1315a
| 74
|
py
|
Python
|
exampleproject/b.py
|
pjdelport/pytest-testmon
|
dbbaf2f29cc7e9a2745f27dae91e44ce973e8d10
|
[
"MIT"
] | null | null | null |
exampleproject/b.py
|
pjdelport/pytest-testmon
|
dbbaf2f29cc7e9a2745f27dae91e44ce973e8d10
|
[
"MIT"
] | null | null | null |
exampleproject/b.py
|
pjdelport/pytest-testmon
|
dbbaf2f29cc7e9a2745f27dae91e44ce973e8d10
|
[
"MIT"
] | null | null | null |
def divide(a, b):
return a // b
def multiply(a, b):
return a * b
| 12.333333
| 19
| 0.540541
| 14
| 74
| 2.857143
| 0.428571
| 0.2
| 0.4
| 0.45
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.310811
| 74
| 5
| 20
| 14.8
| 0.784314
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
44141a92b7c31b021bbb389ba1254363f47e9517
| 14,941
|
py
|
Python
|
LatexPlots/MatLibPlot/IntersectionPlots.py
|
Hengoo/BVHRaytracer
|
d911f06e1af88859a9c94926669f334a64c516eb
|
[
"MIT"
] | null | null | null |
LatexPlots/MatLibPlot/IntersectionPlots.py
|
Hengoo/BVHRaytracer
|
d911f06e1af88859a9c94926669f334a64c516eb
|
[
"MIT"
] | null | null | null |
LatexPlots/MatLibPlot/IntersectionPlots.py
|
Hengoo/BVHRaytracer
|
d911f06e1af88859a9c94926669f334a64c516eb
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
inputFolder = "../Data/"
outputFolder = "../Plots/IntersectionPlots/"
showImage = True
def endPlot():
if showImage:
plt.show()
else:
plt.close()
def primaryAnalysis():
#load the workload file and visualize it.
filePath = inputFolder + "averageTable_AllInter.txt"
#load:
(branchFactor, leafSize, subdivision, primaryNodeIntersections, primaryLeafIntersections, primaryAabb,
primaryAabbSuccessRatio, primaryPrimitive, primaryPrimitiveSuccessRatio, secondaryNodeIntersections,
secondaryLeafIntersections, secondaryAabb, secondaryAabbSuccessRatio, secondaryPrimitive,
secondaryPrimitiveSuccessRatio, nodeSah, leafSah, nodeEpo, leafEpo, leafVolume, leafSurfaceArea,
traversalNodeFullness, traversalLeafFullness, BVHNodeFullness, BVHLeafFullness, nodeCount, leafCount,
averageLeafDepth, treeDepth, primaryWasteFactor, secondaryWasteFactor, primaryNodeCachelines,
secondaryNodeCachelines, totalTime, nodeTime, leafTime, perAabbCost, perTriCost, sahNodeFactor)= np.loadtxt(filePath, delimiter=',', unpack=True, skiprows=1)
#x = np.arange(branchFactor.size)
leafSizes = [1,2, 4, 8, 12, 16]
nodeSizes = [2, 4, 8, 12, 16]
printImprovement = False
fig = plt.figure(figsize=(12, 7))
plt.subplots_adjust(hspace = 0.25, wspace = 0.22)
ax = plt.subplot(2, 2, 1)
#Node intersections by branching factor.
for i in leafSizes:
filter2 = primaryNodeIntersections[leafSize == i]
filter1 = branchFactor[leafSize == i]
plt.plot(filter1, filter2, label='L' + str(i))
if printImprovement and i == 4:
print("-------- node intersection improvement")
lastValue = filter2[0]
for n in range(0, 15):
currentValue = filter2[n]
tmp1 = "%.2f" % ((currentValue / lastValue - 1) * 100)
tmp2 = "%.2f" % currentValue
print(" & N" + str(n + 2) +" & " + tmp2 + " & " + tmp1 + " \\% \\\\")
lastValue = currentValue
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Primary Node intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 2)
#aabb intersections by branching factor.
for i in leafSizes:
#filter2 = primaryAabb[leafSize == i]
filter3 = primaryAabb[leafSize == i]
filter2 = primaryNodeIntersections[leafSize == i]
filter2 *= filter1
plt.plot(filter1, filter2, label='L' + str(i))
if printImprovement and i == 4:
print("-------- aabb improvement")
lastValue = filter2[0]
for n in range(0, 15):
currentValue = filter2[n]
tmp1 = "%.2f" % ((currentValue / lastValue - 1) * 100)
tmp2 = "%.2f" % currentValue
print(" & N" + str(n + 2) +" & " + tmp2 + " & " + tmp1 + " \\% \\\\")
lastValue = currentValue
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Primary Aabb intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 3)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
filter2 = primaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
if printImprovement and i == 4:
print("-------- leaf intersection improvement")
lastValue = filter2[0]
for n in range(0, 16):
currentValue = filter2[n]
tmp1 = "%.2f" % ((currentValue / lastValue - 1) * 100)
tmp2 = "%.2f" % currentValue
print(" & L" + str(n + 1) +" & " + tmp2 + " & " + tmp1 + " \\% \\\\")
lastValue = currentValue
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Primary Leaf intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 4)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
#filter2 = primaryPrimitive[branchFactor == i]
filter2 = primaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
filter2 *= filter1
if printImprovement and i == 4:
print("-------- triangle improvement")
lastValue = filter2[0]
for n in range(0, 16):
currentValue = filter2[n]
tmp1 = "%.2f" % ((currentValue / lastValue - 1) * 100)
tmp2 = "%.2f" % currentValue
print(" & L" + str(n + 1) +" & " + tmp2 + " & " + tmp1 + " \\% \\\\")
lastValue = currentValue
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Primary Triangle intersections\n\$\\triangleleft$ less is better')
plt.legend()
#save to file
plt.savefig(outputFolder + "IntersectionResults.pdf", bbox_inches='tight')
plt.savefig(outputFolder + "IntersectionResults.pgf", bbox_inches='tight')
endPlot()
def secondaryAnalysis():
#load the workload file and visualize it.
filePath = inputFolder + "averageTable_AllInter.txt"
#load:
(branchFactor, leafSize, subdivision, primaryNodeIntersections, primaryLeafIntersections, primaryAabb,
primaryAabbSuccessRatio, primaryPrimitive, primaryPrimitiveSuccessRatio, secondaryNodeIntersections,
secondaryLeafIntersections, secondaryAabb, secondaryAabbSuccessRatio, secondaryPrimitive,
secondaryPrimitiveSuccessRatio, nodeSah, leafSah, nodeEpo, leafEpo, leafVolume, leafSurfaceArea,
traversalNodeFullness, traversalLeafFullness, BVHNodeFullness, BVHLeafFullness, nodeCount, leafCount,
averageLeafDepth, treeDepth, primaryWasteFactor, secondaryWasteFactor, primaryNodeCachelines,
secondaryNodeCachelines, totalTime, nodeTime, leafTime, perAabbCost, perTriCost, sahNodeFactor)= np.loadtxt(filePath, delimiter=',', unpack=True, skiprows=1)
#x = np.arange(branchFactor.size)
leafSizes = [1,2, 4, 8, 12, 16]
nodeSizes = [2, 4, 8, 12, 16]
fig = plt.figure(figsize=(12, 7))
plt.subplots_adjust(hspace = 0.25, wspace = 0.22)
ax = plt.subplot(2, 2, 1)
#Node intersections by branching factor.
for i in leafSizes:
filter2 = secondaryNodeIntersections[leafSize == i]
filter1 = branchFactor[leafSize == i]
plt.plot(filter1, filter2, label='L' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Secondary Node intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 2)
#aabb intersections by branching factor.
for i in leafSizes:
#filter2 = secondaryAabb[leafSize == i]
filter2 = secondaryNodeIntersections[leafSize == i]
filter2 *= filter1
plt.plot(filter1, filter2, label='L' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Secondary Aabb intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 3)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
filter2 = secondaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Secondary Leaf intersections\n\$\\triangleleft$ less is better')
plt.legend()
ax = plt.subplot(2, 2, 4)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
#filter2 = secondaryPrimitive[branchFactor == i]
filter2 = secondaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
filter2 *= filter1
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Secondary Triangle intersections\n\$\\triangleleft$ less is better')
plt.legend()
#save to file
plt.savefig(outputFolder + "SecondaryIntersectionResults.pdf", bbox_inches='tight')
plt.savefig(outputFolder + "SecondaryIntersectionResults.pgf", bbox_inches='tight')
endPlot()
def measuredFullness():
#comparing sponza and gallery epos
filePath = inputFolder + "averageTable_AllInter.txt"
#filePath = inputFolder + "galleryTable_AllInter.txt"
#load:
(branchFactor, leafSize, subdivision, primaryNodeIntersections, primaryLeafIntersections, primaryAabb,
primaryAabbSuccessRatio, primaryPrimitive, primaryPrimitiveSuccessRatio, secondaryNodeIntersections,
secondaryLeafIntersections, secondaryAabb, secondaryAabbSuccessRatio, secondaryPrimitive,
secondaryPrimitiveSuccessRatio, nodeSah, leafSah, nodeEpo, leafEpo, leafVolume, leafSurfaceArea,
traversalNodeFullness, traversalLeafFullness, BVHNodeFullness, BVHLeafFullness, nodeCount, leafCount,
averageLeafDepth, treeDepth, primaryWasteFactor, secondaryWasteFactor, primaryNodeCachelines,
secondaryNodeCachelines, totalTime, nodeTime, leafTime, perAabbCost, perTriCost, sahNodeFactor) = np.loadtxt(filePath, delimiter=',', unpack=True, skiprows=1)
fig = plt.figure(figsize=(12,3.8))
plt.subplots_adjust(hspace = 0.4, wspace = 0.22)
nodeSizes = [2, 3, 4, 8, 12, 16]
leafSizes = [1,2,3,4,8,12,16]
#node fullness
ax = plt.subplot(1, 2, 1)
#plt.title("Node Fullness")
for i in leafSizes:
filter2 = traversalNodeFullness[leafSize == i] * 100
filter1 = branchFactor[leafSize == i]
filter2 /= filter1
plt.plot(filter1, filter2, label='L' + str(i))
plt.xticks(np.arange(2, 18, step=2))
#ax.set_ylim(ymin= -5, ymax = 105)
plt.xlabel('Node size')
plt.ylabel('Traversal Node Fullness [%]\nmore is better $\\triangleright$')
plt.legend(ncol=3)
#leaf fullness
ax = plt.subplot(1, 2, 2)
plt.plot([2],[100]) # <- in order to scip first color ;/
#plt.title("Leaf Fullness")
for i in nodeSizes:
filter2 = traversalLeafFullness[branchFactor == i] * 100
filter1 = leafSize[branchFactor == i]
filter2 /= filter1
plt.plot(filter1, filter2, label='N' + str(i))
plt.xticks(np.arange(2, 18, step=2))
#ax.set_ylim(ymin= -5, ymax = 105)
plt.xlabel('Leaf size')
plt.ylabel('Traversal Leaf Fullness [%]\nmore is better $\\triangleright$')
plt.legend(ncol=3)
#save to file
plt.savefig(outputFolder + "measuredFullness.pdf", bbox_inches='tight')
plt.savefig(outputFolder + "measuredFullness.pgf", bbox_inches='tight')
endPlot()
def intersectionCompPrim():
#compares all scene results
fig = plt.figure(figsize=(13, 15))
plt.subplots_adjust(hspace = 0.5, wspace = 0.20)
filePaths = ["sponzaTable_AllInter.txt", "sanMiguelTable_AllInter.txt", "galleryTable_AllInter.txt", "amazonLumberyardInteriorTable_AllInter.txt", "amazonLumberyardExteriorTable_AllInter.txt"]
sceneNames = ["Sponza", "San Miguel", "Gallery", "Bistro Interior", "Bistro Exterior"]
for iteration, n in enumerate(filePaths):
filePath = inputFolder + n
sceneName = sceneNames[iteration]
#load:
(branchFactor, leafSize, subdivision, primaryNodeIntersections, primaryLeafIntersections, primaryAabb,
primaryAabbSuccessRatio, primaryPrimitive, primaryPrimitiveSuccessRatio, secondaryNodeIntersections,
secondaryLeafIntersections, secondaryAabb, secondaryAabbSuccessRatio, secondaryPrimitive,
secondaryPrimitiveSuccessRatio, nodeSah, leafSah, nodeEpo, leafEpo, leafVolume, leafSurfaceArea,
traversalNodeFullness, traversalLeafFullness, BVHNodeFullness, BVHLeafFullness, nodeCount, leafCount,
averageLeafDepth, treeDepth, primaryWasteFactor, secondaryWasteFactor, primaryNodeCachelines,
secondaryNodeCachelines, totalTime, nodeTime, leafTime, perAabbCost, perTriCost, sahNodeFactor) = np.loadtxt(filePath, delimiter=',', unpack=True, skiprows=1)
leafSizes = [1,2, 4, 8, 12, 16]
nodeSizes = [2, 4, 8, 12, 16]
ax = plt.subplot(5, 2, 1 + iteration * 2)
plt.title(sceneName)
#Node intersections by branching factor.
for i in leafSizes:
filter2 = primaryNodeIntersections[leafSize == i]
filter1 = branchFactor[leafSize == i]
plt.plot(filter1, filter2, label='L' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Primary Node intersections\n\$\\triangleleft$ less is better')
plt.legend(ncol=2)
ax = plt.subplot(5, 2, 2 + iteration * 2)
plt.title(sceneName)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
filter2 = primaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Primary Leaf intersections\n\$\\triangleleft$ less is better')
plt.legend(ncol=2)
plt.savefig(outputFolder + "intersectionComparisonPrim.pdf", bbox_inches='tight')
plt.savefig(outputFolder + "intersectionComparisonPrim.pgf", bbox_inches='tight')
endPlot()
def intersectionCompSec():
#compares all scene results
fig = plt.figure(figsize=(13, 15))
plt.subplots_adjust(hspace = 0.5, wspace = 0.20)
filePaths = ["sponzaTable_AllInter.txt", "sanMiguelTable_AllInter.txt", "galleryTable_AllInter.txt", "amazonLumberyardInteriorTable_AllInter.txt", "amazonLumberyardExteriorTable_AllInter.txt"]
sceneNames = ["Sponza", "San Miguel", "Gallery", "Bistro Interior", "Bistro Exterior"]
for iteration, n in enumerate(filePaths):
filePath = inputFolder + n
sceneName = sceneNames[iteration]
#load:
(branchFactor, leafSize, subdivision, primaryNodeIntersections, primaryLeafIntersections, primaryAabb,
primaryAabbSuccessRatio, primaryPrimitive, primaryPrimitiveSuccessRatio, secondaryNodeIntersections,
secondaryLeafIntersections, secondaryAabb, secondaryAabbSuccessRatio, secondaryPrimitive,
secondaryPrimitiveSuccessRatio, nodeSah, leafSah, nodeEpo, leafEpo, leafVolume, leafSurfaceArea,
traversalNodeFullness, traversalLeafFullness, BVHNodeFullness, BVHLeafFullness, nodeCount, leafCount,
averageLeafDepth, treeDepth, primaryWasteFactor, secondaryWasteFactor, primaryNodeCachelines,
secondaryNodeCachelines, totalTime, nodeTime, leafTime, perAabbCost, perTriCost, sahNodeFactor) = np.loadtxt(filePath, delimiter=',', unpack=True, skiprows=1)
leafSizes = [1,2, 4, 8, 12, 16]
nodeSizes = [2, 4, 8, 12, 16]
ax = plt.subplot(5, 2, 1 + iteration * 2)
plt.title(sceneName)
#Node intersections by branching factor.
for i in leafSizes:
filter2 = secondaryNodeIntersections[leafSize == i]
filter1 = branchFactor[leafSize == i]
plt.plot(filter1, filter2, label='L' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Node size')
plt.ylabel('\# Secondary Node intersections\n\$\\triangleleft$ less is better')
plt.legend(ncol=2)
ax = plt.subplot(5, 2, 2 + iteration * 2)
plt.title(sceneName)
plt.plot([2],[1]) # <- in order to scip first color ;/
#Leaf intersections by Leaf size.
for i in nodeSizes:
filter2 = secondaryLeafIntersections[branchFactor == i]
filter1 = leafSize[branchFactor == i]
plt.plot(filter1, filter2, label='N' + str(i))
ax.set_ylim(ymin= 0)
plt.xlabel('Leaf size')
plt.ylabel('\# Secondary Leaf intersections\n\$\\triangleleft$ less is better')
plt.legend(ncol=2)
plt.savefig(outputFolder + "intersectionComparisonSec.pdf", bbox_inches='tight')
plt.savefig(outputFolder + "intersectionComparisonSec.pgf", bbox_inches='tight')
endPlot()
#primaryAnalysis()
#secondaryAnalysis()
#measuredFullness()
intersectionCompPrim()
intersectionCompSec()
| 38.607235
| 193
| 0.723044
| 1,680
| 14,941
| 6.404762
| 0.119643
| 0.013662
| 0.015613
| 0.027323
| 0.902416
| 0.887918
| 0.861059
| 0.83987
| 0.827602
| 0.81868
| 0
| 0.029053
| 0.143029
| 14,941
| 387
| 194
| 38.607235
| 0.811309
| 0.092163
| 0
| 0.827957
| 0
| 0
| 0.157949
| 0.076496
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021505
| false
| 0
| 0.007168
| 0
| 0.028674
| 0.046595
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44824aeaf5977d7644e89e3a7ff4aa4f4d419c46
| 7,549
|
py
|
Python
|
convert_descriptor_to_swagger/mn_tasks.py
|
brighthive/convert_descriptor_to_swagger
|
ac59504ddf70622fb76e68b23dfdf4555836d3bd
|
[
"MIT"
] | null | null | null |
convert_descriptor_to_swagger/mn_tasks.py
|
brighthive/convert_descriptor_to_swagger
|
ac59504ddf70622fb76e68b23dfdf4555836d3bd
|
[
"MIT"
] | 3
|
2019-12-10T19:47:39.000Z
|
2019-12-20T19:11:59.000Z
|
convert_descriptor_to_swagger/mn_tasks.py
|
brighthive/convert_descriptor_to_swagger
|
ac59504ddf70622fb76e68b23dfdf4555836d3bd
|
[
"MIT"
] | 1
|
2020-04-29T18:18:49.000Z
|
2020-04-29T18:18:49.000Z
|
from typing import Optional

from deepmerge import always_merger

from convert_descriptor_to_swagger.reference.full_output import (
    component_schemas_mn,
    component_responses_mn,
    component_request_bodies_mn,
)
def add_mn_schemas(swag: Optional[dict] = None) -> dict:
    """Merge the many-to-many component schemas into *swag* and return it.

    ``swag`` defaults to a fresh empty dict on every call. The previous
    ``swag: dict = {}`` default was a shared mutable default that
    ``always_merger.merge`` mutated in place, leaking merged state between
    calls made without an explicit argument.
    """
    if swag is None:
        swag = {}
    swag = always_merger.merge(swag, component_schemas_mn)
    return swag
def add_mn_responses(swag: Optional[dict] = None) -> dict:
    """Merge the many-to-many component responses into *swag* and return it.

    ``swag`` defaults to a fresh empty dict on every call; the original
    ``swag: dict = {}`` was a shared mutable default mutated in place by
    ``always_merger.merge``, so state leaked between no-argument calls.
    """
    if swag is None:
        swag = {}
    swag = always_merger.merge(swag, component_responses_mn)
    return swag
def add_mn_request_bodies(swag: Optional[dict] = None) -> dict:
    """Merge the many-to-many component request bodies into *swag* and return it.

    ``swag`` defaults to a fresh empty dict on every call; the original
    ``swag: dict = {}`` was a shared mutable default mutated in place by
    ``always_merger.merge``, so state leaked between no-argument calls.
    """
    if swag is None:
        swag = {}
    swag = always_merger.merge(swag, component_request_bodies_mn)
    return swag
def _mn_operation_responses() -> dict:
    """Return a fresh 200/401/500 responses object for one operation.

    Built per call so no nested dict instance is shared between operations:
    ``always_merger.merge`` mutates dicts in place, and shared instances
    could alias state inside the merged document.
    """
    return {
        "200": {"$ref": "#/components/responses/Relations"},
        "401": {
            "description": "Access denied",
            "content": {
                "application/json": {
                    "schema": {"$ref": "#/components/schemas/401"}
                }
            },
        },
        "500": {
            "description": "Internal server error",
            "content": {
                "application/json": {
                    "schema": {"$ref": "#/components/schemas/500"}
                }
            },
        },
    }


def _mn_path_item(tag: str) -> dict:
    """Build the swagger path item (get/put/patch) for one direction of the relation."""
    return {
        "parameters": [{"$ref": "#/components/parameters/id"}],
        "get": {
            "tags": [f"{tag}"],
            "summary": "Get many-to-many relationship",
            "responses": _mn_operation_responses(),
        },
        "put": {
            "tags": [f"{tag}"],
            "summary": "Replace many-to-many relationship",
            "requestBody": {"$ref": "#/components/requestBodies/Relations"},
            "responses": _mn_operation_responses(),
        },
        "patch": {
            "tags": [f"{tag}"],
            "summary": "Update many-to-many relationship",
            "requestBody": {"$ref": "#/components/requestBodies/Relations"},
            "responses": _mn_operation_responses(),
        },
    }


def add_mn_paths(relationship: list, swag: Optional[dict] = None) -> dict:
    """Merge the two path items of a many-to-many relationship into *swag*.

    Parameters
    ----------
    relationship : list
        Two-element list ``[parent, child]`` of resource names; a path is
        added for each direction (``/parent/{id}/child`` and
        ``/child/{id}/parent``).
    swag : dict, optional
        Swagger document to merge into; defaults to a fresh empty dict.
        The original ``swag: dict = {}`` was a shared mutable default that
        ``always_merger.merge`` mutated in place between calls.

    Returns
    -------
    dict
        The merged swagger document.
    """
    if swag is None:
        swag = {}
    parent = relationship[0]
    child = relationship[1]
    paths_mn = {
        "paths": {
            f"/{parent}/{{id}}/{child}": _mn_path_item(parent),
            f"/{child}/{{id}}/{parent}": _mn_path_item(child),
        }
    }
    swag = always_merger.merge(swag, paths_mn)
    return swag
| 41.251366
| 84
| 0.313551
| 388
| 7,549
| 6.012887
| 0.154639
| 0.133733
| 0.113159
| 0.144021
| 0.839691
| 0.828976
| 0.803258
| 0.803258
| 0.766395
| 0.707244
| 0
| 0.027678
| 0.559677
| 7,549
| 182
| 85
| 41.478022
| 0.674188
| 0
| 0
| 0.581395
| 0
| 0
| 0.265201
| 0.095907
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.011628
| 0
| 0.05814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2bd2a5fa412b8cec87b1a08dab779ef7fbd76638
| 16,480
|
py
|
Python
|
program05/Herd2.py
|
yijxiang/cs_python
|
390a8d17e5381186421bf8d6a55fced03ddd5cb7
|
[
"CNRI-Python"
] | 3
|
2021-05-19T16:33:15.000Z
|
2021-05-19T19:50:32.000Z
|
program05/Herd2.py
|
sip958/cs_python
|
390a8d17e5381186421bf8d6a55fced03ddd5cb7
|
[
"CNRI-Python"
] | null | null | null |
program05/Herd2.py
|
sip958/cs_python
|
390a8d17e5381186421bf8d6a55fced03ddd5cb7
|
[
"CNRI-Python"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
***********************************************
Pablo Fernandez
Emily Klein
Copyright 2016, www.pablofernandez.com
***********************************************
"""
print("************************************")
print("Pablo Fernandez")
print("Emily Klein")
print("Program 05 Herd of Turtles")
print("A&B:")
print("************************************")
"""
Above and Beyond
- Not written yet
"""
import turtle
import math
import random

# Total turtles that exist; the user chooses how many of them wake up.
MAX_TURTLES = 20

# Prompt until the user gives a herd size in range.
# NOTE: the original used bitwise `|` between the comparisons; `or` is the
# correct boolean operator (same result here, but idiomatic and short-circuiting).
n = 99999
while (n > 20) or (n < 1):
    n = int(input("How many turtles would you like (1-20): "))

# Create every possible herd member up front (as the original did), then key
# all per-turtle state off the Turtle object itself.  The dicts are built in
# the same order as the original so the random-call sequence is unchanged:
# first all colors, then all x positions, then all y positions.
all_turtles = [turtle.Turtle() for _ in range(MAX_TURTLES)]
turtle_status = {t: 'Sleeping' for t in all_turtles}
turtle_colors = {t: "#%03x" % random.randint(0, 0xFFF) for t in all_turtles}
turtle_position_x = {t: random.randrange(-400, 400) for t in all_turtles}
turtle_position_y = {t: random.randrange(-400, 400) for t in all_turtles}

# Wake exactly the first n turtles.  This replaces the original 20-branch
# if-chain (one hand-written branch per possible value of n) with a slice.
turtles = all_turtles[:n]
for t in turtles:
    turtle_status[t] = "Awake"

# Window setup.
wn = turtle.Screen()
wn.title("Pablo & Emily")
wn.bgcolor("#E5E4E2")
wn.screensize(800, 800)
wn.colormode(255)

# Animation loop: an "Awake" turtle teleports (pen up) to its start position
# once and becomes "Exercising"; an "Exercising" turtle wanders with pen down.
while True:
    for item in turtles:
        item.shape("turtle")
        current_color = turtle_colors[item]
        item.color(current_color)
        if turtle_status[item] == "Exercising":
            item.speed(20)
            item.pendown()
            item.forward(random.randrange(30, 120))
            item.right(90)
            item.right(random.randrange(-180, 180))
        if turtle_status[item] == "Awake":
            item.penup()
            item.speed(100)
            item.goto(turtle_position_x[item], turtle_position_y[item])
            item.left(random.randrange(-180, 180))
            turtle_status[item] = "Exercising"
wn.mainloop()  # unreachable after the infinite loop, kept from the original
print("----------------------------------------")
| 36.460177
| 225
| 0.674333
| 2,293
| 16,480
| 4.521151
| 0.057567
| 0.247709
| 0.366355
| 0.42153
| 0.81499
| 0.812096
| 0.756053
| 0.756053
| 0.753352
| 0.753352
| 0
| 0.081936
| 0.175
| 16,480
| 452
| 226
| 36.460177
| 0.680568
| 0.011286
| 0
| 0.635443
| 0
| 0
| 0.09886
| 0.007052
| 0
| 0
| 0.006297
| 0
| 0
| 1
| 0
| false
| 0
| 0.007595
| 0
| 0.007595
| 0.017722
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a61fad7c5657a0677edfd05ace0df1e4d887930c
| 7,278
|
py
|
Python
|
xmas1.py
|
abdulfaizp/adventofcode
|
11dd475312d69aadfa341a5d1e39b521cb6afe7c
|
[
"CC0-1.0"
] | null | null | null |
xmas1.py
|
abdulfaizp/adventofcode
|
11dd475312d69aadfa341a5d1e39b521cb6afe7c
|
[
"CC0-1.0"
] | null | null | null |
xmas1.py
|
abdulfaizp/adventofcode
|
11dd475312d69aadfa341a5d1e39b521cb6afe7c
|
[
"CC0-1.0"
] | null | null | null |
def basement(a):
    """Return True when the running floor counter *a* is exactly -1 (the basement).

    Replaces the verbose ``if/else return True/False`` with a direct boolean
    expression; behavior is identical.
    """
    return a == -1
def main():
    """Advent of Code 2015 day 1 part 2 (Python 2 source).

    Scans the puzzle input of '(' (up one floor) and ')' (down one floor) and
    prints the 1-indexed character position whenever the floor counter is
    exactly -1.
    NOTE(review): the loop does not stop after the first hit, so the position
    is printed every time the floor passes through -1, not just the first.
    """
    # puzzle input: one very long string of parentheses
    indata="((((()(()(((((((()))(((()((((()())(())()(((()((((((()((()(()(((()(()((())))()((()()())))))))))()((((((())((()))(((((()(((((((((()()))((()(())()((())((()(()))((()))()))()(((((()(((()()))()())((()((((())()())()((((())()(()(()(((()(())(()(())(((((((())()()(((())(()(()(()(())))(()((((())((()))(((()(()()(((((()()(()(((()(((((())()))()((()(()))()((()((((())((((())(()(((())()()(()()()()()(())((((())((())(()()))()((((())))((((()())()((((())((()())((())(())(((((()((((()(((()((((())(()(((()()))()))((((((()((())()())))(((()(()))(()()(()(((()(()))((()()()())((()()()(((())())()())())())((()))(()(()))(((((()(()(())((()(())(())()((((()())()))((((())(())((())())((((()(((())(())((()()((((()((((((()(())()()(()(()()((((()))(())()())()))(())))(())))())()()(())(()))()((()(()(())()()))(()())))))(()))(()()))(())(((((()(()(()()((())()())))))((())())((())(()(())((()))(())(((()((((((((()()()(()))()()(((()))()((()()(())(())())()(()(())))(((((()(())(())(()))))())()))(()))()(()(((((((()((((())))())())())())()((((((((((((((()()((((((()()()())())()())())())(())(())))())((()())((()(()))))))()))))))))))))))))())((())((())()()))))))(((()((()(()()))((())(()()))()()())))(())))))))(()(((())))())()())))()()(())()))()(()))())((()()))))(()))))()))(()()(())))))))()(((()))))()(()))(())())))))()))((()))((()))())(())))))))))((((())()))()))()))())(())()()(())))())))(()())()))((()()(())))(())((((((()(())((()(((()(()()(())))()))))))()))()(()((()))()(()))(()(((())((((())())(())(()))))))))())))))))())())))))())))))()()(((())()(()))))))))())))))(())()()()))()))()))(()(())()()())())))))))())()(()(()))))()()()))))())(()))))()()))))()())))))(((())()()))(()))))))))))()()))))()()()))))(()())())()()())()(()))))()(()))(())))))))(((((())(())())()()))()()))(())))))()(()))))(())(()()))()())()))()))()))()))))())()()))())())))(()))(()))))))())()(((())()))))))))()))()())))())))())))()))))))))))()()))(()()))))))(())()(()))))())(()))))(()))))(()())))))())())()()))))())()))))))))(()))))()))))))()(()())))))))()))())))())))(
))))())))))))())(()()))))))(()())())))()())()))))))))))))))())))()(())))()))())()()(())(()()))(())))())()())(()(()(()))))())))))))))))())(()))()))()))))(())()())()())))))))))))()()))))))))))))())())))))(()())))))))))))())(())))()))))))))())())(()))()))(())))()))()()(())()))))))()((((())()))())())))))()))()))))((()())()))))())))(())))))))))))))))))()))))()()())()))()()))))())()))((()())))())))(()))(()())))))))()))()))))(())))))))(())))))())()()(()))())()))()()))))())()()))))())()))())))))))(()))))()())()))))))))(()))())))(()))()))))(())()))())())(())())())))))))((((())))))()))()))()())()(())))()))()))()())(()())()()(()())()))))())())))))(()))()))))())(()()(())))))(())()()((())())))))(())(())))))))())))))))))()(())))))))()())())())()(()))))))))(()))))))))())()()))()(()))))))()))))))())))))))(())))()()(())()())))))(((())))()((())()))())))(()()))())(())())))()(((()())))))()(()()())))()()(()()(()()))())()(()()()))())()()))()())(()))))())))))())))(())()()))))(()))))(())(()))(())))))()()))()))))())()))()()(())())))((()))())()))))))()()))))((()(()))))()()))))))())))))())(()((()())))))))))))()())())))()))(()))))))(()))(())()())))(()))))))))())()()()()))))(()())))))))((())))()))(()))(())(())()())()))))))))(())))())))(()))()()))(()()))(()))())))()(())))())((()((()(())))((())))()))))((((())())()())))(())))()))))))())(()()((())))())()(()())))))(()())()))())))))))((())())))))))(()(()))())()()(()()(((()(((()())))))()))))))()(())(()()((()()(())()()))())()())()))()())())())))))))(((())))))))()()))))))(((())()))(()()))(()()))))(()(()()((((())()())((()()))))(()(())))))()((()()()())()()((()((()()))(()))(((()()()))(((())))()(((())()))))))((()(())())))(()())(((((()(()))(()((()))(()())()))))(()(()))()(()))(())(((())(()()))))()()))(((()))))(()()()()))())))((()()()(())()))()))))()()))()))))))((((((()()()))))())((()()(((()))))(()(())(()()())())())))()(((()()))(())((())))(()))(()()()())((())())())(()))))()))()((()(())()(()()(())(()))(())()))(())(()))))(())(())())(()()(()((()()((())))((()))()(
(())))(((()()()()((((()))(()()))()()()(((())((())())(()()(()()()))()((())(())()))())(((()()(())))()((()()())()())(()(())())(((())(())())((())(())()(((()()))(())))((())(()())())(())((()()()((((((())))((()(((((())()))()))(())(()()))()))(())()()))(())((()()())()()(()))())()((())))()((()()())((((()())((())())())((()((()))()))((())((()()(()((()()(((())(()()))))((()((())()(((())(()((())())((())(()((((((())())()(()())()(())(((())((((((()(())(()((()()()((()()(()()()())))()()(((((()()))()((((((()))()(()(()(()(((()())((()))())()((()))(())))()))()()))())()()))())((((())(()(()))(((((((())(((()(((((()(((()()((((())(((())())))(()()()(()(()))()))((((((()))((()(((()(())((()((((()((((((())(((((())))(((()(()))))(((()(((())()((())(()((()))(((()()(((())((((()(()(((((()))(((()(((((((()(()()()(()(()(()()())(())(((((()(())())()())(()(()(()))()(()()()())(()()(()((()))()((())())()(()))((())(()))()(()))()(((()(()(()((((((()()()()())()(((((()()(((()()()((()(((((()))((((((((()()()(((((()))))))(()()()(())(()))(()()))))(())()))(((((()(((((()()(()(()())(((()))((((()((()(()(()((()(()((())))()(((()((()))((()))(((((((((()((()((()(())))()((((()((()()))((())(((()(((((()()(()(()()((()(()()()(((((((())())()())))))((((()()(()))()))(()((())()(()(((((((((()()(((()(()())(()((()())((())())((((()(((()(((()((((()((()((((()(()((((((())((((((((((((()()(()()((((((((((((((()((()()))()((((((((((((())((((()(()())((()(()(()))()(((((()()(((()()))()())(())((()(((((()((())(((((()((()(((((()))()()((((())()((((())(((((((((()(())(()(())))())(()((())(((())(())(())())(()(()(())()()((()((())()(((()(((((()(())))()(((()((())))((()()()(((()(((()((()(()(())(()((()())(()(()(((()(((((((((())(()((((()()))(()((((()()()()(((()((((((((()(()()((((((()(()()(()((()((((((((((()()(((((((()())(())))(((()()))(((((()((()()())(()()((((())((()((((()))))(())((()(()()(((()(()(((()((((()(((((()))())())(()((())()))(((()())((())((())((((()((()((((((())(()((((()()))((((((())()(()))((()(((())((((((((((()()(((((()(((((()((()()()((((())))(()))()((()(())(
)()((()((((((((((()((())(())(((((()(()(()()))((((()((((()()((()(((()(((((((((()(()((()((()))((((((()(((())()()((()(((((((()())))()()(()((()((()()(((()(()()()()((((()((())((((()(((((((((()(((()()(((()(()(((()(((()((())()(()((()(()(()(()))()(((()))(()((((()((())((((())((((((())(()))(()((((())((()(()((((((((()()((((((()(()(()()()(())((()((()()(((()(((((((()()((()(((((((()))(((((()(((()(()()()(()(((()((()()((())(()(((((((((()(()((()((((((()()((())()))(((((()((())()())()(((((((((((()))((((()()()()())(()()(()(()()))()))(()))(()(((()()))())(()(()))()()((())(()())()())()(()))()))(()()(()((((((())((()(((((((((((()(())()((()(()((()((()(()((()((((((((((()()())((())()(())))((())()())()(((((()(()())((((()((()(())(()))(((())()((()))(((((())(()))()()(()))(((())((((()((((()(())))(((((((()))))())()())(())((())()(()()((()(()))()(()()(()()((()())((())((()()))((((()))()()))(()()(())()()(((((()(())((()((((()))()))(()())())(((()()(()()))(())))))(()))((())(((((()((((()))()((((()))()((())(((())))(((()())))((()(()()((
    pos=0      # 1-indexed position of the character just consumed
    floor=0    # running floor counter
    for letter in indata:
        pos+=1
        if letter=="(":
            floor+=1
            # going up can also reach -1 (e.g. from floor -2)
            if basement(floor):
                print pos
        else:
            floor-=1
            if basement(floor):
                print pos
main()
| 303.25
| 7,010
| 0.021847
| 42
| 7,278
| 3.785714
| 0.428571
| 0.056604
| 0.100629
| 0.201258
| 0.36478
| 0.36478
| 0.36478
| 0
| 0
| 0
| 0
| 0.000833
| 0.010855
| 7,278
| 24
| 7,011
| 303.25
| 0.021253
| 0
| 0
| 0.3
| 0
| 0
| 0.961808
| 0.961671
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6ded1fec0f1e801d90133bce2e4764ddcd034b4
| 149
|
py
|
Python
|
FrCmprss/FrCmprssMain.py
|
sadaakisz/FractalCompression
|
2a2a4f2f092c9d4945de988e830ca15b4a0e0bff
|
[
"Unlicense"
] | null | null | null |
FrCmprss/FrCmprssMain.py
|
sadaakisz/FractalCompression
|
2a2a4f2f092c9d4945de988e830ca15b4a0e0bff
|
[
"Unlicense"
] | null | null | null |
FrCmprss/FrCmprssMain.py
|
sadaakisz/FractalCompression
|
2a2a4f2f092c9d4945de988e830ca15b4a0e0bff
|
[
"Unlicense"
] | null | null | null |
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from scipy import ndimage
from scipy import optimize
import numpy as np
import math
| 21.285714
| 32
| 0.838926
| 24
| 149
| 5.208333
| 0.583333
| 0.256
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147651
| 149
| 6
| 33
| 24.833333
| 0.984252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a6ef3433e3d9f5e0335a195515e128c6e248a1f9
| 12,686
|
py
|
Python
|
Incident-Response/Tools/dfirtrack/dfirtrack_api/tests/dfirtrack_main/task/test_task_api_views.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 1
|
2021-07-24T17:22:50.000Z
|
2021-07-24T17:22:50.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_api/tests/dfirtrack_main/task/test_task_api_views.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2
|
2022-02-28T03:40:31.000Z
|
2022-02-28T03:40:52.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_api/tests/dfirtrack_main/task/test_task_api_views.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2
|
2022-02-25T08:34:51.000Z
|
2022-03-16T17:29:44.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone
from dfirtrack_main.models import System, Systemstatus, Tag, Tagcolor, Task, Taskname, Taskpriority, Taskstatus
import urllib.parse
class TaskAPIViewTestCase(TestCase):
""" task API view tests """
@classmethod
def setUpTestData(cls):
    """Create the fixtures shared by every test in this class.

    Creation order matters: the Task rows at the end reference the User,
    Taskname, Taskpriority and Taskstatus rows created first.
    """
    # user used both for API login and as creator/modifier FK target
    test_user = User.objects.create_user(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
    # create mandatory foreign key objects
    # systemstatus required by the system created below
    systemstatus_1 = Systemstatus.objects.create(systemstatus_name='systemstatus_1')
    # taskname_1 is attached to a task below; _2 and _3 stay free for POST tests
    taskname_1 = Taskname.objects.create(taskname_name='taskname_2')
    # create object
    Taskname.objects.create(taskname_name='taskname_2')
    # create object
    Taskname.objects.create(taskname_name='taskname_3')
    # prio_1 is attached to a task below; prio_2 and prio_3 stay free
    taskpriority_1 = Taskpriority.objects.create(taskpriority_name='prio_1')
    # create object
    Taskpriority.objects.create(taskpriority_name='prio_2')
    # create object
    Taskpriority.objects.create(taskpriority_name='prio_3')
    # taskstatus referenced by the tasks created below
    taskstatus_1 = Taskstatus.objects.create(taskstatus_name='taskstatus_1')
    # create optional foreign key objects
    # tagcolor required by the tag
    tagcolor_1 = Tagcolor.objects.create(tagcolor_name='tagcolor_1')
    # tag available for tagging-related tests
    Tag.objects.create(
        tagcolor = tagcolor_1,
        tag_name = 'tag_1',
    )
    # system available for task/system relation tests
    System.objects.create(
        system_name = 'system_api_1',
        systemstatus = systemstatus_1,
        system_modify_time = timezone.now(),
        system_created_by_user_id = test_user,
        system_modified_by_user_id = test_user,
    )
    # first task, using taskname_1
    Task.objects.create(
        taskname = taskname_1,
        taskpriority = taskpriority_1,
        taskstatus = taskstatus_1,
        task_created_by_user_id = test_user,
        task_modified_by_user_id = test_user,
    )
    # taskname for the second task
    taskname_parent = Taskname.objects.create(taskname_name='taskname_parent')
    # second task, using taskname_parent
    Task.objects.create(
        taskname = taskname_parent,
        taskpriority = taskpriority_1,
        taskstatus = taskstatus_1,
        task_created_by_user_id = test_user,
        task_modified_by_user_id = test_user,
    )
def test_task_list_api_unauthorized(self):
""" unauthorized access is forbidden"""
# get response
response = self.client.get('/api/task/')
# compare
self.assertEqual(response.status_code, 401)
def test_task_list_api_method_get(self):
""" GET is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get response
response = self.client.get('/api/task/')
# compare
self.assertEqual(response.status_code, 200)
def test_task_list_api_method_post(self):
""" POST is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get user
test_user_id = User.objects.get(username='testuser_task_api').id
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_2').taskname_id
# get object
taskpriority_id = Taskpriority.objects.get(taskpriority_name='prio_1').taskpriority_id
# get object
taskstatus_id = Taskstatus.objects.get(taskstatus_name='taskstatus_1').taskstatus_id
# create POST string
poststring = {
"taskname": taskname_id,
"taskpriority": taskpriority_id,
"taskstatus": taskstatus_id,
"task_created_by_user_id": test_user_id,
"task_modified_by_user_id": test_user_id,
}
# get response
response = self.client.post('/api/task/', data=poststring, content_type='application/json')
# compare
self.assertEqual(response.status_code, 201)
def test_task_list_api_method_post_all_id(self):
""" POST is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get user
test_user_id = User.objects.get(username='testuser_task_api').id
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_3').taskname_id
# get object
taskpriority_id = Taskpriority.objects.get(taskpriority_name='prio_1').taskpriority_id
# get object
taskstatus_id = Taskstatus.objects.get(taskstatus_name='taskstatus_1').taskstatus_id
# get object
taskname_parent_id = Taskname.objects.get(taskname_name='taskname_parent').taskname_id
# get object
parenttask_id = Task.objects.get(taskname=taskname_parent_id).task_id
# get object
system_id = System.objects.get(system_name='system_api_1').system_id
# get object
tag_id = Tag.objects.get(tag_name='tag_1').tag_id
# create POST string
poststring = {
"parent_task": parenttask_id,
"system": system_id,
"tag": [
tag_id,
],
"taskname": taskname_id,
"taskpriority": taskpriority_id,
"taskstatus": taskstatus_id,
"task_scheduled_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_started_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_finished_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_due_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_assigned_to_user_id": test_user_id,
"task_created_by_user_id": test_user_id,
"task_modified_by_user_id": test_user_id,
}
# get response
response = self.client.post('/api/task/', data=poststring, content_type='application/json')
# compare
self.assertEqual(response.status_code, 201)
def test_task_list_api_redirect(self):
""" test redirect with appending slash """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# create url
destination = urllib.parse.quote('/api/task/', safe='/')
# get response
response = self.client.get('/api/task', follow=True)
# compare
self.assertRedirects(response, destination, status_code=301, target_status_code=200)
def test_task_detail_api_unauthorized (self):
""" unauthorized access is forbidden"""
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# get response
response = self.client.get('/api/task/' + str(task_api_1.task_id) + '/')
# compare
self.assertEqual(response.status_code, 401)
def test_task_detail_api_method_get(self):
""" GET is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# get response
response = self.client.get('/api/task/' + str(task_api_1.task_id) + '/')
# compare
self.assertEqual(response.status_code, 200)
def test_task_detail_api_method_delete(self):
""" DELETE is forbidden """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# get response
response = self.client.delete('/api/task/' + str(task_api_1.task_id) + '/')
# compare
self.assertEqual(response.status_code, 405)
def test_task_detail_api_method_put(self):
""" PUT is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get user
test_user_id = User.objects.get(username='testuser_task_api').id
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
new_taskpriority_id = Taskpriority.objects.get(taskpriority_name='prio_2').taskpriority_id
# get object
taskstatus_id = Taskstatus.objects.get(taskstatus_name='taskstatus_1').taskstatus_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# create url
destination = urllib.parse.quote('/api/task/' + str(task_api_1.task_id) + '/', safe='/')
# create PUT string
putstring = {
"taskname": taskname_id,
"taskpriority": new_taskpriority_id,
"taskstatus": taskstatus_id,
"task_created_by_user_id": test_user_id,
"task_modified_by_user_id": test_user_id,
}
# get response
response = self.client.put(destination, data=putstring, content_type='application/json')
# compare
self.assertEqual(response.status_code, 200)
def test_task_detail_api_method_put_all_id(self):
""" PUT is allowed """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get user
test_user_id = User.objects.get(username='testuser_task_api').id
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
new_taskpriority_id = Taskpriority.objects.get(taskpriority_name='prio_3').taskpriority_id
# get object
taskstatus_id = Taskstatus.objects.get(taskstatus_name='taskstatus_1').taskstatus_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# get object
taskname_parent_id = Taskname.objects.get(taskname_name='taskname_parent').taskname_id
# get object
parenttask_id = Task.objects.get(taskname=taskname_parent_id).task_id
# get object
system_id = System.objects.get(system_name='system_api_1').system_id
# get object
tag_id = Tag.objects.get(tag_name='tag_1').tag_id
# create url
destination = urllib.parse.quote('/api/task/' + str(task_api_1.task_id) + '/', safe='/')
# create PUT string
putstring = {
"parent_task": parenttask_id,
"system": system_id,
"tag": [
tag_id,
],
"taskname": taskname_id,
"taskpriority": new_taskpriority_id,
"taskstatus": taskstatus_id,
"task_scheduled_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_started_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_finished_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_due_time": timezone.now().strftime('%Y-%m-%dT%H:%M'),
"task_assigned_to_user_id": test_user_id,
"task_created_by_user_id": test_user_id,
"task_modified_by_user_id": test_user_id,
}
# get response
response = self.client.put(destination, data=putstring, content_type='application/json')
# compare
self.assertEqual(response.status_code, 200)
def test_task_detail_api_redirect(self):
""" test redirect with appending slash """
# login testuser
self.client.login(username='testuser_task_api', password='jmvsz1Z551zZ4E3Cnp8D')
# get object
taskname_id = Taskname.objects.get(taskname_name='taskname_1').taskname_id
# get object
task_api_1 = Task.objects.get(
taskname = taskname_id,
)
# create url
destination = urllib.parse.quote('/api/task/' + str(task_api_1.task_id) + '/', safe='/')
# get response
response = self.client.get('/api/task/' + str(task_api_1.task_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=301, target_status_code=200)
| 39.64375
| 111
| 0.634242
| 1,453
| 12,686
| 5.246387
| 0.079835
| 0.044602
| 0.037518
| 0.029385
| 0.89348
| 0.875246
| 0.854257
| 0.828808
| 0.773186
| 0.767546
| 0
| 0.017572
| 0.259814
| 12,686
| 319
| 112
| 39.768025
| 0.794249
| 0.111067
| 0
| 0.652174
| 0
| 0
| 0.145299
| 0.021233
| 0
| 0
| 0
| 0
| 0.059783
| 1
| 0.065217
| false
| 0.054348
| 0.027174
| 0
| 0.097826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a6efb62809a097581155da404fa1b0c3b2ddc91a
| 16,477
|
py
|
Python
|
accelbyte_py_sdk/api/platform/wrappers/_payment_station.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/platform/wrappers/_payment_station.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/platform/wrappers/_payment_station.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import deprecated
from ....core import same_doc_as
from ..models import Customization
from ..models import ErrorEntity
from ..models import PaymentMethod
from ..models import PaymentOrderDetails
from ..models import PaymentOrderPaidResult
from ..models import PaymentProcessResult
from ..models import PaymentToken
from ..models import PaymentUrl
from ..models import PaymentUrlCreate
from ..models import TaxResult
from ..operations.payment_station import GetPaymentCustomization
from ..operations.payment_station import GetPaymentCustomizationPaymentProviderEnum
from ..operations.payment_station import GetPaymentPublicConfig
from ..operations.payment_station import GetPaymentPublicConfigPaymentProviderEnum
from ..operations.payment_station import GetPaymentTaxValue
from ..operations.payment_station import GetPaymentTaxValuePaymentProviderEnum
from ..operations.payment_station import Pay
from ..operations.payment_station import PayPaymentProviderEnum
from ..operations.payment_station import PublicCheckPaymentOrderPaidStatus
from ..operations.payment_station import PublicGetPaymentMethods
from ..operations.payment_station import PublicGetPaymentUrl
from ..operations.payment_station import PublicGetQRCode
from ..operations.payment_station import PublicGetUnpaidPaymentOrder
from ..operations.payment_station import PublicNormalizePaymentReturnUrl
from ..operations.payment_station import PublicNormalizePaymentReturnUrlPaymentProviderEnum
from ..models import PaymentMethodPaymentProviderEnum
from ..models import PaymentUrlPaymentProviderEnum, PaymentUrlPaymentTypeEnum
from ..models import PaymentUrlCreatePaymentProviderEnum
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Deprecated wrapper around the GetPaymentCustomization operation; the docstring is
# presumably attached by @same_doc_as from the operation class - confirm against core.
@deprecated
@same_doc_as(GetPaymentCustomization)
def get_payment_customization(payment_provider: Union[str, GetPaymentCustomizationPaymentProviderEnum], region: str, sandbox: Optional[bool] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            # (result, error) tuple mirrors the run_request return convention
            return None, error
    request = GetPaymentCustomization.create(
        payment_provider=payment_provider,
        region=region,
        sandbox=sandbox,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@deprecated
@same_doc_as(GetPaymentCustomization)
async def get_payment_customization_async(payment_provider: Union[str, GetPaymentCustomizationPaymentProviderEnum], region: str, sandbox: Optional[bool] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = GetPaymentCustomization.create(
        payment_provider=payment_provider,
        region=region,
        sandbox=sandbox,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the GetPaymentPublicConfig operation (sync execution).
@same_doc_as(GetPaymentPublicConfig)
def get_payment_public_config(payment_provider: Union[str, GetPaymentPublicConfigPaymentProviderEnum], region: str, sandbox: Optional[bool] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = GetPaymentPublicConfig.create(
        payment_provider=payment_provider,
        region=region,
        sandbox=sandbox,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(GetPaymentPublicConfig)
async def get_payment_public_config_async(payment_provider: Union[str, GetPaymentPublicConfigPaymentProviderEnum], region: str, sandbox: Optional[bool] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = GetPaymentPublicConfig.create(
        payment_provider=payment_provider,
        region=region,
        sandbox=sandbox,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the GetPaymentTaxValue operation (sync execution).
@same_doc_as(GetPaymentTaxValue)
def get_payment_tax_value(payment_order_no: str, payment_provider: Union[str, GetPaymentTaxValuePaymentProviderEnum], zip_code: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = GetPaymentTaxValue.create(
        payment_order_no=payment_order_no,
        payment_provider=payment_provider,
        zip_code=zip_code,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(GetPaymentTaxValue)
async def get_payment_tax_value_async(payment_order_no: str, payment_provider: Union[str, GetPaymentTaxValuePaymentProviderEnum], zip_code: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = GetPaymentTaxValue.create(
        payment_order_no=payment_order_no,
        payment_provider=payment_provider,
        zip_code=zip_code,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the Pay operation (sync execution).
@same_doc_as(Pay)
def pay(payment_order_no: str, body: Optional[PaymentToken] = None, payment_provider: Optional[Union[str, PayPaymentProviderEnum]] = None, zip_code: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = Pay.create(
        payment_order_no=payment_order_no,
        body=body,
        payment_provider=payment_provider,
        zip_code=zip_code,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(Pay)
async def pay_async(payment_order_no: str, body: Optional[PaymentToken] = None, payment_provider: Optional[Union[str, PayPaymentProviderEnum]] = None, zip_code: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = Pay.create(
        payment_order_no=payment_order_no,
        body=body,
        payment_provider=payment_provider,
        zip_code=zip_code,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicCheckPaymentOrderPaidStatus operation (sync execution).
@same_doc_as(PublicCheckPaymentOrderPaidStatus)
def public_check_payment_order_paid_status(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicCheckPaymentOrderPaidStatus.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicCheckPaymentOrderPaidStatus)
async def public_check_payment_order_paid_status_async(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicCheckPaymentOrderPaidStatus.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicGetPaymentMethods operation (sync execution).
@same_doc_as(PublicGetPaymentMethods)
def public_get_payment_methods(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetPaymentMethods.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicGetPaymentMethods)
async def public_get_payment_methods_async(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetPaymentMethods.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicGetPaymentUrl operation (sync execution).
@same_doc_as(PublicGetPaymentUrl)
def public_get_payment_url(body: Optional[PaymentUrlCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetPaymentUrl.create(
        body=body,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicGetPaymentUrl)
async def public_get_payment_url_async(body: Optional[PaymentUrlCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetPaymentUrl.create(
        body=body,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicGetQRCode operation (sync execution).
@same_doc_as(PublicGetQRCode)
def public_get_qr_code(code: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetQRCode.create(
        code=code,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicGetQRCode)
async def public_get_qr_code_async(code: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetQRCode.create(
        code=code,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicGetUnpaidPaymentOrder operation (sync execution).
@same_doc_as(PublicGetUnpaidPaymentOrder)
def public_get_unpaid_payment_order(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetUnpaidPaymentOrder.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicGetUnpaidPaymentOrder)
async def public_get_unpaid_payment_order_async(payment_order_no: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicGetUnpaidPaymentOrder.create(
        payment_order_no=payment_order_no,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
# NOTE(review): generated code ("DO NOT EDIT" header) - comments only, no code changes.
# Wrapper around the PublicNormalizePaymentReturnUrl operation (sync execution).
# The many optional parameters mirror provider-specific return-URL query fields;
# their exact semantics live in the operation class, not here.
@same_doc_as(PublicNormalizePaymentReturnUrl)
def public_normalize_payment_return_url(order_no: str, payment_order_no: str, payment_provider: Union[str, PublicNormalizePaymentReturnUrlPaymentProviderEnum], return_url: str, foreinginvoice: Optional[str] = None, invoice_id: Optional[str] = None, payer_id: Optional[str] = None, payload: Optional[str] = None, redirect_result: Optional[str] = None, result_code: Optional[str] = None, session_id: Optional[str] = None, status: Optional[str] = None, token: Optional[str] = None, type_: Optional[str] = None, user_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # fall back to the SDK's configured namespace when none is passed
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicNormalizePaymentReturnUrl.create(
        order_no=order_no,
        payment_order_no=payment_order_no,
        payment_provider=payment_provider,
        return_url=return_url,
        foreinginvoice=foreinginvoice,
        invoice_id=invoice_id,
        payer_id=payer_id,
        payload=payload,
        redirect_result=redirect_result,
        result_code=result_code,
        session_id=session_id,
        status=status,
        token=token,
        type_=type_,
        user_id=user_id,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)


# Async variant: identical request construction, awaited execution.
@same_doc_as(PublicNormalizePaymentReturnUrl)
async def public_normalize_payment_return_url_async(order_no: str, payment_order_no: str, payment_provider: Union[str, PublicNormalizePaymentReturnUrlPaymentProviderEnum], return_url: str, foreinginvoice: Optional[str] = None, invoice_id: Optional[str] = None, payer_id: Optional[str] = None, payload: Optional[str] = None, redirect_result: Optional[str] = None, result_code: Optional[str] = None, session_id: Optional[str] = None, status: Optional[str] = None, token: Optional[str] = None, type_: Optional[str] = None, user_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    if namespace is None:
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = PublicNormalizePaymentReturnUrl.create(
        order_no=order_no,
        payment_order_no=payment_order_no,
        payment_provider=payment_provider,
        return_url=return_url,
        foreinginvoice=foreinginvoice,
        invoice_id=invoice_id,
        payer_id=payer_id,
        payload=payload,
        redirect_result=redirect_result,
        result_code=result_code,
        session_id=session_id,
        status=status,
        token=token,
        type_=type_,
        user_id=user_id,
        namespace=namespace,
    )
    return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
| 44.532432
| 649
| 0.742793
| 1,874
| 16,477
| 6.27588
| 0.079509
| 0.039282
| 0.058668
| 0.040813
| 0.855114
| 0.77893
| 0.768047
| 0.761925
| 0.753932
| 0.753932
| 0
| 0.000293
| 0.171451
| 16,477
| 369
| 650
| 44.653117
| 0.861193
| 0.046489
| 0
| 0.752475
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033003
| false
| 0
| 0.115512
| 0
| 0.280528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a6f690936d017dd16c4fce8096118163820f45ef
| 17,458
|
py
|
Python
|
Main_Server/nav.py
|
aby03/Geometric-Reconstruction
|
aea05fec0332530ecb2986201223b05f3d91633e
|
[
"MIT"
] | null | null | null |
Main_Server/nav.py
|
aby03/Geometric-Reconstruction
|
aea05fec0332530ecb2986201223b05f3d91633e
|
[
"MIT"
] | null | null | null |
Main_Server/nav.py
|
aby03/Geometric-Reconstruction
|
aea05fec0332530ecb2986201223b05f3d91633e
|
[
"MIT"
] | null | null | null |
elements = []
#for i in range (0,98):
# elements.append([])
# index 0 for lift, 1 for toilet, 2 for stairs
#from 0
elements.append(["turn left and go straight 2 to 3 metres","turn right and go straight 4 to 5 metres","turn left and go straight 7 to 8 metres"])
#from 1 to 10
elements.append(["turn back and move 5 to 6 metres then take right","turn back and move 5 to 6 metres then take right","turn back and move 5 to 6 metres and then take left"])
elements.append(["turn right and move 5 to 6 metres then take right", "turn right and move 5 to 6 metres then take right", "turn right and move 5 to 6 metres then take left"])
elements.append(["turn right and move 5 to 6 metres then take right", "turn right and move 5 to 6 metres then take right", "turn right 5 to 6 metres and move then take left"])
elements.append(["turn back and move 4 to 5 metres then take right","turn back and move 4 to 5 metres then take right","turn back and move 4 to 5 metres and then take left"])
elements.append(["turn back and move 3 to 4 metres then take right","turn back and move 3 to 4 metres then take right","turn back and move 3 to 4 metres and then take left"])
elements.append(["turn back and move 1 to 2 metres then take right", "turn back and move 1 to 2 metres then take right","turn back move 1 to 2 metres and then take left"])
elements.append(["turn back and move 1 metre then take right", "turn back and move 1 metre then take right","turn back and move 1 metre then take left"])
elements.append(["turn back and move 5 to 6 metres","turn back and move around 10 metres","turn slight right and move 3 metres"])
elements.append(["turn back and move 5 metres", "turn back and move 10 metres", "move straight 4 metres"])
elements.append(["turn left and move 6 metres", "turn left and move 11 metres","turn right and move 4 metres"])
#from 11 to 20
elements.append(["turn right and move 6 metres","turn right and move 11 metres","turn left and move 4 metres"])
elements.append(["turn back and move 7 metres", "turn back and move 15 metres", "go straight 2 metres"])
elements.append(["turn back and move 7 metres", "turn back and move 18 metres", "Straight ahead"])
elements.append(["turn right and move 7 metres","turn right and move 18 metres","turn left and move 1 metre"])
elements.append(["turn back and move 4 metres","turn back and move 10 metres","take slight left and go straight 4 metres"])
elements.append(["turn back and move 2 metres and then take right","turn back and move 7 metres and then take left","take slight left and go straight 7 metres"])
elements.append(["turn back and move 3 metres and then take right","turn back and move 10 metres and then take left","take slight right and go straight 5 metres"])
elements.append(["turn right and move 3 metres and then take right","turn right and move 10 metres and then take left","turn left and move 6 metres"])
elements.append(["turn right and move 3 metres and then take right","turn right and move 10 metres and then take left","turn left and move 6 metres"])
elements.append(["On the left","turn back and move 7 metres and then take left","take slight right and go straight 8 metres"])
#from 21 to 30
elements.append(["Straight ahead","turn left and move straight 7 metres", "turn right and move straight 9 metres"])
elements.append(["Straight ahead","turn left and move straight 7 metres", "turn right and move straight 9 metres"])
elements.append(["Straight ahead","turn left and move straight 7 metres", "turn right and move straight 9 metres"])
elements.append(["Straight ahead","turn left and move straight 7 metres", "turn right and move straight 9 metres"])
elements.append(["turn back", "turn right and move 7 metres", "turn left and move 9 metres"])
elements.append(["go straight 3 metres and turn left","turn back and move 2 metres and then take left","go straight 10 metres"])
elements.append(["turn left and move 3 metres and turn left","turn right and move 3 metres and take left","turn left and move 10 metres"])
# Lookup table of spoken navigation instructions, appended to the `elements`
# list started earlier in the file.  Each entry is a triple of direction
# phrases for one destination; `navigate(index1, index2)` below returns
# elements[index1][index2].  The three columns presumably correspond to three
# different starting points/variants for the same destination — TODO confirm
# against the caller.  The decade comments track the destination numbering.
elements.append(["turn left and move 3 metres and turn left","turn right and move 3 metres and take left","turn left and move 10 metres"])
elements.append(["walk straight a few steps","turn right and move","walk straight"])
elements.append(["turn left and move 5 metres and turn left","walk straight 3 metres and then take right","turn left and move 15 metres"])
# entries 31 to 40
elements.append(["turn back and move 3 metres and take right", "move straight and take slight right", "turn back and move 3 metres and take right, then move straight"])
elements.append(["turn back and move 3 metres and take right", "Take right", "turn back and move 3 metres and take right, then move straight"])
elements.append(["move right 5 metres and then take right","Straight Ahead","take right and move forward 5 metres and then again take right"])
elements.append(["move right and then forward","turn left and move forward","take right and move forward and then again take right"])
elements.append(["move right and then forward 3 metres and then take right","turn slight left and move forward","take right and move forward 3 metres and then again take right"])
elements.append(["turn left and move forward for 4 steps and then take left and move 5 metre","turn back and move straight for about 5 metres","turn back and move for about 2 metre then turn left and walk for 20 metres"])
elements.append(["move right and then forward 2 metres and then take right","turn left and move forward 2 metres then take right","take right and move forward 2 metres and then again take right"])
elements.append(["move right and then forward 1 metre and then take right","turn left and move forward 2 metres then take right","take right and move forward 2 metres and then again take right"])
elements.append(["turn back and move 7 metres and then take left","turn left and move forward 6 metres and then take right","turn back and move forward 15 metres"])
elements.append(["turn back and move 7 metres and then take left","turn left and move forward 6 metres and then take right","turn back and move forward 15 metres"])
# entries 41 to 50 (note: the first five rows are near-duplicates by design)
elements.append(["turn left and move forward for 2 steps and then take left and move 5 metre","turn back and move straight for about 5 metres","turn back and move for about 2 metre then turn left and walk for 20 metres"])
elements.append(["turn left and move forward for 2 steps and then take left and move 5 metre","turn back and move straight for about 5 metres","turn back and move for about 2 metre then turn left and walk for 20 metres"])
elements.append(["turn left and move forward for 2 steps and then take left and move 5 metre","turn back and move straight for about 5 metres","turn back and move for about 2 metre then turn left and walk for 20 metres"])
elements.append(["turn left and move forward for 2 steps and then take left and move 5 metre","turn back and move straight for about 7 metres","turn back and move for about 3 metre then turn left and walk for 20 metres"])
elements.append(["turn left and move forward for 3 steps and then take left and move 5 metre","turn back and move straight for about 7 metres","turn back and move for about 3 metre then turn left and walk for 20 metres"])
elements.append(["move straight for about 5 metres and then take left and go about 5 metres", "move straight for about 10 metres","move straight for about 5 metres and then take left and move for about 20 metres"])
elements.append(["turn right and move straight for 5 metres and then take left and move about 5 metres","turn left and then move straight for about 10 metres","turn left and move for about 5 metres and then take left and move for about 20 metres"])
elements.append(["turn back and move a few steps and take left and move for about 5 metres","turn back and then move forward for about 8 metres","turn back and move some steps and then take left and move forward for about 20 meters"])
elements.append(["turn back move forward for about 5 metres then take right and move 5 metres","turn back and move for about 5 metres and then take right and move for about 15 metres","turn back and move forward for about 4 metres then take left and move around 5 meters"])
elements.append(["turn left and move forward then take right","turn left and move forward then take right", "turn left and move forward then take left"])
# entries 51 to 60
elements.append(["turn back and move a metre then take right and move for about 5 metres","turn back and move 2 metres then take right and move for about 15 metres","turn back and move 2 metres then take left and move for about 5 metres"])
elements.append(["turn left and then take right after a metre and walk for about 5 metre","turn left and move 2 steps then take right and go about 20 metres","turn left and go a metre then again turn left and move for about 5 metres"])
elements.append(["turn left and move for about 5 metres","turn left and move for about 10 metres","turn right and move for about 5 metres"])
elements.append(["turn back and move for about 5 metres", "turn back and move for about 15 metres", "move straight for about 5 metres"])
elements.append(["turn back and move for about 5 metres", "turn back and move for about 15 metres", "move straight for about 2 metres"])
elements.append(["turn left and go straight for about 5 metres","turn left and go straight for about 15 metres","turn right and go straight for about 5 metres"])
elements.append(["turn right and move for about 5 metres","turn right and move for about 15 metres","turn left and move for about 5 metres"])
elements.append(["turn right and move for about 5 metres" ,"turn right and move for about 15 metres","turn left and move for about 5 metres"])
elements.append(["turn right and move for about 5 metres","turn right and move for about 15 metres","turn left and move for about 5 metres"])
elements.append(["go slight straight for about 2 metre then turn right","go straight for 6 metres then take left","turn back and move straight for about 10 metres"])
# entries 61 to 70
elements.append(["turn back","turn right and move for about 5 metres then take left","turn left and move for about 10 metres"])
elements.append(["turn back","turn right and move for about 5 metres then take left","turn left and move for about 10 metres"])
elements.append(["Its in front","turn left and move for about 5 metres","turn right and Move for about 15 metres"])
elements.append(["turn right","move forward for about 5 metres","turn back and move straight for about 15 metres"])
elements.append(["turn back and move 3 metres then turn left","go straight 3 metres then take left and go for about 4 metres","turn back and go straight for about 18 metres"])
elements.append(["turn left and move for about 5 metres","turn right and move for about 5 metres","turn left and go straight for about 15 metres"])
elements.append(["turn left and move for about 5 metres","walk straight for about 5 metres","turn left and move for about 20 metres"])
elements.append(["turn back and go straight for about 5 metres","turn left and move for about 5 metres","turn back and go straight for about 20 metres"])
elements.append(["turn back and move 2 metres then take right and walk 20 metres","go slight right and move a few steps","turn back and move 2 metres then take right and go straight for about 20 metres"])
elements.append(["turn back and go straight for about 5 metres","turn slight left and go straight for 2 metres","turn back and go straight for about 20 metres"])
# entries 71 to 80
elements.append(["turn right and move forward for about 5 metres","turn back and go a few steps","turn right and go straight for about 20 metres"])
elements.append(["turn right and move forward for about 5 metres","turn back and go a few steps","turn right and go straight for about 20 metres"])
elements.append(["turn left and move 2 metre and again take a left and move around 5 metres","turn left and go forward for about 5 metres","turn left and move 3 metres then again a left and go forward for about 20 metres"])
elements.append(["turn left and move 2 metre and again take a left and move around 5 metres","turn left and go forward for about 5 metres","turn left and move 3 metres then again a left and go forward for about 20 metres"])
elements.append(["turn left and move 3 metre","turn right and move 3 metres then take left","turn left and go straight for about 17 metres"])
elements.append(["turn right and go straight for 4 metres","turn left and move for a metre then take left and go around 4 metres","turn right and go straight for about 18 metres"])
elements.append(["go straight about 4 metres","turn back and move for 2 metres and then take left and go 4 metres","go straight for about 18 metres"])
elements.append(["on the left","turn back and move for about 5 metres then take left","go straight for about 15 metres"])
elements.append(["on the left","turn back and move for about 5 metres then take left","go straight for about 15 metres"])
elements.append(["turn right and move for about 4 metres","turn right and then go straight for about 15 metres then take left","turn left and go straight for about 5 metres"])
# entries 81 to 90
elements.append(["turn right","go straight around 4 metres and then take left and go 3 metres","turn back and go straight for around 15 metres"])
elements.append(["go back a few steps and take left and move around 5 metres","turn slight right and move for about 2 metres","take a few steps back and then go left for about 20 metres"])
elements.append(["take left and walk for 2 metres and then take right and walk around 5 metres","turn right and go a few steps","turn left and walk around 2 metres then turn right and go straight for 20 metres"])
elements.append(["turn back and take 3 steps","turn back and go straight for around 6 metres","turn slight right and go straight for 8 metres"])
elements.append(["turn back and take around 3 steps","turn back and go straight for about 6 metres","turn slight right and go straight for about 10 metres"])
elements.append(["turn back and move for about 5 metres","turn back and move for about 15 metres","take slight right and go straight for about 5 metres"])
elements.append(["turn right and move for about 3 metres","turn right and move for about 10 metres","turn left and move for about 5 metres"])
elements.append(["turn left and move for about 3 metres", "turn left and move for about 15 metres","turn right and move for about 10 metres"])
elements.append(["turn back and move for about 5 metres","turn left and move for about 5 metres","turn back and go straight for about 20 metres"])
elements.append(["turn right and move a few steps then right and go straight for about 20 metres","turn left and move for about 2 metres","turn right and move a few steps and take right and move about 20 metres"])
# entries 91 to 100
elements.append(["turn right and go 4 steps steps then take right","turn left and move 2 metre","turn right and move a few steps and take right and then go straight for 20 metres"])
elements.append(["turn right then take another right after a few steps and go around 5 metres","turn left and move around 5 steps","turn right and move 2 steps then again take right and move 20 metres straight"])
elements.append(["turn right and go about 5 steps then take right and go 8 metres","go straight 2 metre","turn right and go a few steps then turn right and walk 20 metres"])
elements.append(["turn back and move 5 steps then take right and go 20 metres","turn slight right and go 1 metre","go straight"])
elements.append(["turn back and move 5 steps then take right and go 20 metres","turn slight right and go 1 metre","go straight 1 metre"])
elements.append(["go straight and then take right and walk around 5 metres","turn back and go around 2 metres","go straight and then take right and move around 20 metres"])
elements.append(["turn back and go straight for 2 metres","turn back and go straight for 7 metres","turn right and go straight for 5 metres"])
elements.append(["turn right and move a few steps","turn left and go straight for 3 metres","turn right and go straight for around 16 metres"])
elements.append(["turn right and go straight for about 3 metres","turn back and go straight for around 2 metres and then take left","turn right and go straight for about 20 metres"])
elements.append(["turn right and go straight for 4 metres","take a left and go straight for 2 metres","turn right and go straight for around 15 metres"])
# entries 101 onward (original comment said "101 to 102", but eight entries follow)
elements.append(["go straight for 5 metres","turn right and go straight approximately 3 metres","go straight for about 15 metres"])
elements.append(["go straight for 5 metre","go straight for 10 metre","turn back and go 4 metre"])
elements.append(["turn right","go straight 6 metre and turn right","turn back and go approximate 8 metre"])
elements.append(["turn right","turn slight right and go 6 metre then take left","turn back and walk 7 metre"])
elements.append(["turn back and walk 4 metre","go straight 3 metre","turn back and walk 15 metre"])
elements.append(["turn left and walk 5 metre","go straight 3 metre","turn left and walk 15 metre"])
elements.append(["go straight 3 metre","turn back and take left after 3 metre","walk straight 15 metre"])
elements.append(["turn left","turn back and walk 5 metre then take left","go straight for around 10 metre"])
def navigate(index1, index2):
    """Return one stored direction phrase.

    Looks up row ``index1`` of the module-level ``elements`` table and
    returns the phrase in column ``index2``.  Raises ``IndexError`` when
    either index is out of range, exactly like direct list indexing.
    """
    row = elements[index1]
    return row[index2]
#string=navigate(5,1)
#print(string)
| 108.434783
| 273
| 0.753809
| 3,210
| 17,458
| 4.099688
| 0.027103
| 0.112766
| 0.082751
| 0.084347
| 0.914818
| 0.869909
| 0.781155
| 0.715122
| 0.669073
| 0.603495
| 0
| 0.033848
| 0.162332
| 17,458
| 160
| 274
| 109.1125
| 0.866042
| 0.015294
| 0
| 0.214286
| 0
| 0
| 0.813301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008929
| false
| 0
| 0
| 0.008929
| 0.017857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4705db8ff8b0c86c5f3fcb8cc72fadfb68a02831
| 590
|
py
|
Python
|
eval_mosmed_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_RandomBrightnessContrast.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
# Evaluation launcher: runs main.py once per cross-validation fold (0-4)
# for the unetplusplus / timm-regnetx_002 model on the MoSMed dataset with
# the RandomBrightnessContrast augmentation.  The five original commands
# differed only in the fold index, so they are generated instead of
# hand-written, which removes the copy-paste duplication.
ls = [
    f"python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_{fold}_RandomBrightnessContrast.yml"
    for fold in range(5)
]
for cmd in ls:
    # os.system is kept for behavioral compatibility; subprocess.run would
    # be the modern choice but changes the ignored-return-code semantics.
    os.system(cmd)
| 53.636364
| 112
| 0.859322
| 80
| 590
| 5.9625
| 0.3
| 0.104822
| 0.125786
| 0.199161
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0.87631
| 0
| 0.035778
| 0.052542
| 590
| 11
| 113
| 53.636364
| 0.817531
| 0
| 0
| 0
| 0
| 0
| 0.888325
| 0.676819
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
470933fe37dab1f28109446bb1f624e1fa9c478e
| 54,029
|
py
|
Python
|
vsts/vsts/work_item_tracking_process_definitions/v4_1/work_item_tracking_process_definitions_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | null | null | null |
vsts/vsts/work_item_tracking_process_definitions/v4_1/work_item_tracking_process_definitions_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | 37
|
2020-04-27T07:45:19.000Z
|
2021-04-05T07:27:15.000Z
|
vsts/vsts/work_item_tracking_process_definitions/v4_1/work_item_tracking_process_definitions_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...vss_client import VssClient
from . import models
class WorkItemTrackingClient(VssClient):
"""WorkItemTracking
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
    """Initialise the client and its msrest (de)serializers.

    :param str base_url: Service URL
    :param Authentication creds: Authenticated credentials.
    """
    super(WorkItemTrackingClient, self).__init__(base_url, creds)
    # Register every model class exported by the generated models module.
    client_models = {
        name: member
        for name, member in models.__dict__.items()
        if isinstance(member, type)
    }
    self._serialize = Serializer(client_models)
    self._deserialize = Deserializer(client_models)
# Well-known GUID of this service's resource area; presumably consumed by
# the VssClient base class for request routing — confirm against vss_client.
resource_area_identifier = '5264459e-e5e0-4bd8-b118-0985e68a4ec5'
def create_behavior(self, behavior, process_id):
    """CreateBehavior.
    [Preview API] Create one behavior inside the given process.
    :param :class:`<BehaviorCreateModel> <work-item-tracking.v4_1.models.BehaviorCreateModel>` behavior: behavior to create
    :param str process_id: The ID of the process
    :rtype: :class:`<BehaviorModel> <work-item-tracking.v4_1.models.BehaviorModel>`
    """
    # Serialize only route parameters that were actually supplied.
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [('processId', 'process_id', process_id)]
        if value is not None
    }
    payload = self._serialize.body(behavior, 'BehaviorCreateModel')
    raw = self._send(http_method='POST',
                     location_id='47a651f4-fb70-43bf-b96b-7c0ba947142b',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('BehaviorModel', raw)
def delete_behavior(self, process_id, behavior_id):
    """DeleteBehavior.
    [Preview API] Remove a behavior from the process.
    :param str process_id: The ID of the process
    :param str behavior_id: The ID of the behavior
    """
    # Serialize only route parameters that were actually supplied.
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('behaviorId', 'behavior_id', behavior_id),
        ]
        if value is not None
    }
    self._send(http_method='DELETE',
               location_id='47a651f4-fb70-43bf-b96b-7c0ba947142b',
               version='4.1-preview.1',
               route_values=route_values)
def get_behavior(self, process_id, behavior_id):
    """GetBehavior.
    [Preview API] Fetch a single behavior of the process.
    :param str process_id: The ID of the process
    :param str behavior_id: The ID of the behavior
    :rtype: :class:`<BehaviorModel> <work-item-tracking.v4_1.models.BehaviorModel>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('behaviorId', 'behavior_id', behavior_id),
        ]
        if value is not None
    }
    raw = self._send(http_method='GET',
                     location_id='47a651f4-fb70-43bf-b96b-7c0ba947142b',
                     version='4.1-preview.1',
                     route_values=route_values)
    return self._deserialize('BehaviorModel', raw)
def get_behaviors(self, process_id):
    """GetBehaviors.
    [Preview API] List every behavior defined in the process.
    :param str process_id: The ID of the process
    :rtype: [BehaviorModel]
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [('processId', 'process_id', process_id)]
        if value is not None
    }
    raw = self._send(http_method='GET',
                     location_id='47a651f4-fb70-43bf-b96b-7c0ba947142b',
                     version='4.1-preview.1',
                     route_values=route_values)
    items = self._unwrap_collection(raw)
    return self._deserialize('[BehaviorModel]', items)
def replace_behavior(self, behavior_data, process_id, behavior_id):
    """ReplaceBehavior.
    [Preview API] Replace an existing behavior in the process.
    :param :class:`<BehaviorReplaceModel> <work-item-tracking.v4_1.models.BehaviorReplaceModel>` behavior_data: replacement payload
    :param str process_id: The ID of the process
    :param str behavior_id: The ID of the behavior
    :rtype: :class:`<BehaviorModel> <work-item-tracking.v4_1.models.BehaviorModel>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('behaviorId', 'behavior_id', behavior_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(behavior_data, 'BehaviorReplaceModel')
    raw = self._send(http_method='PUT',
                     location_id='47a651f4-fb70-43bf-b96b-7c0ba947142b',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('BehaviorModel', raw)
def add_control_to_group(self, control, process_id, wit_ref_name, group_id):
    """AddControlToGroup.
    [Preview API] Create a control inside a form group.
    :param :class:`<Control> <work-item-tracking.v4_1.models.Control>` control: The control
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str group_id: The ID of the group to add the control to
    :rtype: :class:`<Control> <work-item-tracking.v4_1.models.Control>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('groupId', 'group_id', group_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(control, 'Control')
    raw = self._send(http_method='POST',
                     location_id='e2e3166a-627a-4e9b-85b2-d6a097bbd731',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('Control', raw)
def edit_control(self, control, process_id, wit_ref_name, group_id, control_id):
    """EditControl.
    [Preview API] Update an existing control on the work item form.
    :param :class:`<Control> <work-item-tracking.v4_1.models.Control>` control: The updated control
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str group_id: The ID of the group
    :param str control_id: The ID of the control
    :rtype: :class:`<Control> <work-item-tracking.v4_1.models.Control>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('groupId', 'group_id', group_id),
            ('controlId', 'control_id', control_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(control, 'Control')
    raw = self._send(http_method='PATCH',
                     location_id='e2e3166a-627a-4e9b-85b2-d6a097bbd731',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('Control', raw)
def remove_control_from_group(self, process_id, wit_ref_name, group_id, control_id):
    """RemoveControlFromGroup.
    [Preview API] Delete a control from the work item form.
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str group_id: The ID of the group
    :param str control_id: The ID of the control to remove
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('groupId', 'group_id', group_id),
            ('controlId', 'control_id', control_id),
        ]
        if value is not None
    }
    self._send(http_method='DELETE',
               location_id='e2e3166a-627a-4e9b-85b2-d6a097bbd731',
               version='4.1-preview.1',
               route_values=route_values)
def set_control_in_group(self, control, process_id, wit_ref_name, group_id, control_id, remove_from_group_id=None):
    """SetControlInGroup.
    [Preview API] Move a control into a new group.
    :param :class:`<Control> <work-item-tracking.v4_1.models.Control>` control: The control
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str group_id: The ID of the group to move the control to
    :param str control_id: The id of the control
    :param str remove_from_group_id: The group to remove the control from
    :rtype: :class:`<Control> <work-item-tracking.v4_1.models.Control>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('groupId', 'group_id', group_id),
            ('controlId', 'control_id', control_id),
        ]
        if value is not None
    }
    query_parameters = {}
    if remove_from_group_id is not None:
        query_parameters['removeFromGroupId'] = self._serialize.query(
            'remove_from_group_id', remove_from_group_id, 'str')
    payload = self._serialize.body(control, 'Control')
    raw = self._send(http_method='PUT',
                     location_id='e2e3166a-627a-4e9b-85b2-d6a097bbd731',
                     version='4.1-preview.1',
                     route_values=route_values,
                     query_parameters=query_parameters,
                     content=payload)
    return self._deserialize('Control', raw)
def create_field(self, field, process_id):
    """CreateField.
    [Preview API] Create one field in the process.
    :param :class:`<FieldModel> <work-item-tracking.v4_1.models.FieldModel>` field: field to create
    :param str process_id: The ID of the process
    :rtype: :class:`<FieldModel> <work-item-tracking.v4_1.models.FieldModel>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [('processId', 'process_id', process_id)]
        if value is not None
    }
    payload = self._serialize.body(field, 'FieldModel')
    raw = self._send(http_method='POST',
                     location_id='f36c66c7-911d-4163-8938-d3c5d0d7f5aa',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('FieldModel', raw)
def update_field(self, field, process_id):
    """UpdateField.
    [Preview API] Update a field of the process.
    :param :class:`<FieldUpdate> <work-item-tracking.v4_1.models.FieldUpdate>` field: update payload
    :param str process_id: The ID of the process
    :rtype: :class:`<FieldModel> <work-item-tracking.v4_1.models.FieldModel>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [('processId', 'process_id', process_id)]
        if value is not None
    }
    payload = self._serialize.body(field, 'FieldUpdate')
    raw = self._send(http_method='PATCH',
                     location_id='f36c66c7-911d-4163-8938-d3c5d0d7f5aa',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('FieldModel', raw)
def add_group(self, group, process_id, wit_ref_name, page_id, section_id):
    """AddGroup.
    [Preview API] Add a group to the work item form.
    :param :class:`<Group> <work-item-tracking.v4_1.models.Group>` group: The group
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str page_id: The ID of the page to add the group to
    :param str section_id: The ID of the section to add the group to
    :rtype: :class:`<Group> <work-item-tracking.v4_1.models.Group>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('pageId', 'page_id', page_id),
            ('sectionId', 'section_id', section_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(group, 'Group')
    raw = self._send(http_method='POST',
                     location_id='2617828b-e850-4375-a92a-04855704d4c3',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('Group', raw)
def edit_group(self, group, process_id, wit_ref_name, page_id, section_id, group_id):
    """EditGroup.
    [Preview API] Update a group on the work item form.
    :param :class:`<Group> <work-item-tracking.v4_1.models.Group>` group: The updated group
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str page_id: The ID of the page the group is in
    :param str section_id: The ID of the section the group is in
    :param str group_id: The ID of the group
    :rtype: :class:`<Group> <work-item-tracking.v4_1.models.Group>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('pageId', 'page_id', page_id),
            ('sectionId', 'section_id', section_id),
            ('groupId', 'group_id', group_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(group, 'Group')
    raw = self._send(http_method='PATCH',
                     location_id='2617828b-e850-4375-a92a-04855704d4c3',
                     version='4.1-preview.1',
                     route_values=route_values,
                     content=payload)
    return self._deserialize('Group', raw)
def remove_group(self, process_id, wit_ref_name, page_id, section_id, group_id):
    """RemoveGroup.
    [Preview API] Delete a group from the work item form.
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str page_id: The ID of the page the group is in
    :param str section_id: The ID of the section the group is in
    :param str group_id: The ID of the group
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('pageId', 'page_id', page_id),
            ('sectionId', 'section_id', section_id),
            ('groupId', 'group_id', group_id),
        ]
        if value is not None
    }
    self._send(http_method='DELETE',
               location_id='2617828b-e850-4375-a92a-04855704d4c3',
               version='4.1-preview.1',
               route_values=route_values)
def set_group_in_page(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_page_id, remove_from_section_id):
    """SetGroupInPage.
    [Preview API] Move a group to a different page and section.
    :param :class:`<Group> <work-item-tracking.v4_1.models.Group>` group: The updated group
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str page_id: The ID of the page the group is in
    :param str section_id: The ID of the section the group is in
    :param str group_id: The ID of the group
    :param str remove_from_page_id: ID of the page to remove the group from
    :param str remove_from_section_id: ID of the section to remove the group from
    :rtype: :class:`<Group> <work-item-tracking.v4_1.models.Group>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('pageId', 'page_id', page_id),
            ('sectionId', 'section_id', section_id),
            ('groupId', 'group_id', group_id),
        ]
        if value is not None
    }
    query_parameters = {
        key: self._serialize.query(name, value, 'str')
        for key, name, value in [
            ('removeFromPageId', 'remove_from_page_id', remove_from_page_id),
            ('removeFromSectionId', 'remove_from_section_id', remove_from_section_id),
        ]
        if value is not None
    }
    payload = self._serialize.body(group, 'Group')
    raw = self._send(http_method='PUT',
                     location_id='2617828b-e850-4375-a92a-04855704d4c3',
                     version='4.1-preview.1',
                     route_values=route_values,
                     query_parameters=query_parameters,
                     content=payload)
    return self._deserialize('Group', raw)
def set_group_in_section(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_section_id):
    """SetGroupInSection.
    [Preview API] Move a group to a different section of the same page.
    :param :class:`<Group> <work-item-tracking.v4_1.models.Group>` group: The updated group
    :param str process_id: The ID of the process
    :param str wit_ref_name: The reference name of the work item type
    :param str page_id: The ID of the page the group is in
    :param str section_id: The ID of the section the group is in
    :param str group_id: The ID of the group
    :param str remove_from_section_id: ID of the section to remove the group from
    :rtype: :class:`<Group> <work-item-tracking.v4_1.models.Group>`
    """
    route_values = {
        key: self._serialize.url(name, value, 'str')
        for key, name, value in [
            ('processId', 'process_id', process_id),
            ('witRefName', 'wit_ref_name', wit_ref_name),
            ('pageId', 'page_id', page_id),
            ('sectionId', 'section_id', section_id),
            ('groupId', 'group_id', group_id),
        ]
        if value is not None
    }
    query_parameters = {}
    if remove_from_section_id is not None:
        query_parameters['removeFromSectionId'] = self._serialize.query(
            'remove_from_section_id', remove_from_section_id, 'str')
    payload = self._serialize.body(group, 'Group')
    raw = self._send(http_method='PUT',
                     location_id='2617828b-e850-4375-a92a-04855704d4c3',
                     version='4.1-preview.1',
                     route_values=route_values,
                     query_parameters=query_parameters,
                     content=payload)
    return self._deserialize('Group', raw)
def get_form_layout(self, process_id, wit_ref_name):
"""GetFormLayout.
[Preview API] Gets the form layout
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<FormLayout> <work-item-tracking.v4_1.models.FormLayout>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='3eacc80a-ddca-4404-857a-6331aac99063',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('FormLayout', response)
def get_lists_metadata(self):
"""GetListsMetadata.
[Preview API] Returns meta data of the picklist.
:rtype: [PickListMetadataModel]
"""
response = self._send(http_method='GET',
location_id='b45cc931-98e3-44a1-b1cd-2e8e9c6dc1c6',
version='4.1-preview.1')
return self._deserialize('[PickListMetadataModel]', self._unwrap_collection(response))
def create_list(self, picklist):
"""CreateList.
[Preview API] Creates a picklist.
:param :class:`<PickListModel> <work-item-tracking.v4_1.models.PickListModel>` picklist:
:rtype: :class:`<PickListModel> <work-item-tracking.v4_1.models.PickListModel>`
"""
content = self._serialize.body(picklist, 'PickListModel')
response = self._send(http_method='POST',
location_id='0b6179e2-23ce-46b2-b094-2ffa5ee70286',
version='4.1-preview.1',
content=content)
return self._deserialize('PickListModel', response)
def delete_list(self, list_id):
"""DeleteList.
[Preview API] Removes a picklist.
:param str list_id: The ID of the list
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
self._send(http_method='DELETE',
location_id='0b6179e2-23ce-46b2-b094-2ffa5ee70286',
version='4.1-preview.1',
route_values=route_values)
def get_list(self, list_id):
"""GetList.
[Preview API] Returns a picklist.
:param str list_id: The ID of the list
:rtype: :class:`<PickListModel> <work-item-tracking.v4_1.models.PickListModel>`
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
response = self._send(http_method='GET',
location_id='0b6179e2-23ce-46b2-b094-2ffa5ee70286',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('PickListModel', response)
def update_list(self, picklist, list_id):
"""UpdateList.
[Preview API] Updates a list.
:param :class:`<PickListModel> <work-item-tracking.v4_1.models.PickListModel>` picklist:
:param str list_id: The ID of the list
:rtype: :class:`<PickListModel> <work-item-tracking.v4_1.models.PickListModel>`
"""
route_values = {}
if list_id is not None:
route_values['listId'] = self._serialize.url('list_id', list_id, 'str')
content = self._serialize.body(picklist, 'PickListModel')
response = self._send(http_method='PUT',
location_id='0b6179e2-23ce-46b2-b094-2ffa5ee70286',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('PickListModel', response)
def add_page(self, page, process_id, wit_ref_name):
"""AddPage.
[Preview API] Adds a page to the work item form
:param :class:`<Page> <work-item-tracking.v4_1.models.Page>` page: The page
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<Page> <work-item-tracking.v4_1.models.Page>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(page, 'Page')
response = self._send(http_method='POST',
location_id='1b4ac126-59b2-4f37-b4df-0a48ba807edb',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Page', response)
def edit_page(self, page, process_id, wit_ref_name):
"""EditPage.
[Preview API] Updates a page on the work item form
:param :class:`<Page> <work-item-tracking.v4_1.models.Page>` page: The page
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<Page> <work-item-tracking.v4_1.models.Page>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(page, 'Page')
response = self._send(http_method='PATCH',
location_id='1b4ac126-59b2-4f37-b4df-0a48ba807edb',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Page', response)
def remove_page(self, process_id, wit_ref_name, page_id):
"""RemovePage.
[Preview API] Removes a page from the work item form
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str page_id: The ID of the page
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if page_id is not None:
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str')
self._send(http_method='DELETE',
location_id='1b4ac126-59b2-4f37-b4df-0a48ba807edb',
version='4.1-preview.1',
route_values=route_values)
def create_state_definition(self, state_model, process_id, wit_ref_name):
"""CreateStateDefinition.
[Preview API] Creates a state definition in the work item type of the process.
:param :class:`<WorkItemStateInputModel> <work-item-tracking.v4_1.models.WorkItemStateInputModel>` state_model:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<WorkItemStateResultModel> <work-item-tracking.v4_1.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(state_model, 'WorkItemStateInputModel')
response = self._send(http_method='POST',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
def delete_state_definition(self, process_id, wit_ref_name, state_id):
"""DeleteStateDefinition.
[Preview API] Removes a state definition in the work item type of the process.
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: ID of the state
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
self._send(http_method='DELETE',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values)
def get_state_definition(self, process_id, wit_ref_name, state_id):
"""GetStateDefinition.
[Preview API] Returns a state definition in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: The ID of the state
:rtype: :class:`<WorkItemStateResultModel> <work-item-tracking.v4_1.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
response = self._send(http_method='GET',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('WorkItemStateResultModel', response)
def get_state_definitions(self, process_id, wit_ref_name):
"""GetStateDefinitions.
[Preview API] Returns a list of all state definitions in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: [WorkItemStateResultModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
response = self._send(http_method='GET',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('[WorkItemStateResultModel]', self._unwrap_collection(response))
def hide_state_definition(self, hide_state_model, process_id, wit_ref_name, state_id):
"""HideStateDefinition.
[Preview API] Hides a state definition in the work item type of the process.
:param :class:`<HideStateModel> <work-item-tracking.v4_1.models.HideStateModel>` hide_state_model:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: The ID of the state
:rtype: :class:`<WorkItemStateResultModel> <work-item-tracking.v4_1.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
content = self._serialize.body(hide_state_model, 'HideStateModel')
response = self._send(http_method='PUT',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
def update_state_definition(self, state_model, process_id, wit_ref_name, state_id):
"""UpdateStateDefinition.
[Preview API] Updates a given state definition in the work item type of the process.
:param :class:`<WorkItemStateInputModel> <work-item-tracking.v4_1.models.WorkItemStateInputModel>` state_model:
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: ID of the state
:rtype: :class:`<WorkItemStateResultModel> <work-item-tracking.v4_1.models.WorkItemStateResultModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
if state_id is not None:
route_values['stateId'] = self._serialize.url('state_id', state_id, 'str')
content = self._serialize.body(state_model, 'WorkItemStateInputModel')
response = self._send(http_method='PATCH',
location_id='4303625d-08f4-4461-b14b-32c65bba5599',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemStateResultModel', response)
def add_behavior_to_work_item_type(self, behavior, process_id, wit_ref_name_for_behaviors):
"""AddBehaviorToWorkItemType.
[Preview API] Adds a behavior to the work item type of the process.
:param :class:`<WorkItemTypeBehavior> <work-item-tracking.v4_1.models.WorkItemTypeBehavior>` behavior:
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: :class:`<WorkItemTypeBehavior> <work-item-tracking.v4_1.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
content = self._serialize.body(behavior, 'WorkItemTypeBehavior')
response = self._send(http_method='POST',
location_id='921dfb88-ef57-4c69-94e5-dd7da2d7031d',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeBehavior', response)
def get_behavior_for_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name):
"""GetBehaviorForWorkItemType.
[Preview API] Returns a behavior for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:param str behavior_ref_name: The reference name of the behavior
:rtype: :class:`<WorkItemTypeBehavior> <work-item-tracking.v4_1.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
response = self._send(http_method='GET',
location_id='921dfb88-ef57-4c69-94e5-dd7da2d7031d',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('WorkItemTypeBehavior', response)
def get_behaviors_for_work_item_type(self, process_id, wit_ref_name_for_behaviors):
"""GetBehaviorsForWorkItemType.
[Preview API] Returns a list of all behaviors for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: [WorkItemTypeBehavior]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
response = self._send(http_method='GET',
location_id='921dfb88-ef57-4c69-94e5-dd7da2d7031d',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('[WorkItemTypeBehavior]', self._unwrap_collection(response))
def remove_behavior_from_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name):
"""RemoveBehaviorFromWorkItemType.
[Preview API] Removes a behavior for the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:param str behavior_ref_name: The reference name of the behavior
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
if behavior_ref_name is not None:
route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str')
self._send(http_method='DELETE',
location_id='921dfb88-ef57-4c69-94e5-dd7da2d7031d',
version='4.1-preview.1',
route_values=route_values)
def update_behavior_to_work_item_type(self, behavior, process_id, wit_ref_name_for_behaviors):
"""UpdateBehaviorToWorkItemType.
[Preview API] Updates a behavior for the work item type of the process.
:param :class:`<WorkItemTypeBehavior> <work-item-tracking.v4_1.models.WorkItemTypeBehavior>` behavior:
:param str process_id: The ID of the process
:param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
:rtype: :class:`<WorkItemTypeBehavior> <work-item-tracking.v4_1.models.WorkItemTypeBehavior>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_behaviors is not None:
route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str')
content = self._serialize.body(behavior, 'WorkItemTypeBehavior')
response = self._send(http_method='PATCH',
location_id='921dfb88-ef57-4c69-94e5-dd7da2d7031d',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeBehavior', response)
def create_work_item_type(self, work_item_type, process_id):
"""CreateWorkItemType.
[Preview API] Creates a work item type in the process.
:param :class:`<WorkItemTypeModel> <work-item-tracking.v4_1.models.WorkItemTypeModel>` work_item_type:
:param str process_id: The ID of the process
:rtype: :class:`<WorkItemTypeModel> <work-item-tracking.v4_1.models.WorkItemTypeModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
content = self._serialize.body(work_item_type, 'WorkItemTypeModel')
response = self._send(http_method='POST',
location_id='1ce0acad-4638-49c3-969c-04aa65ba6bea',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeModel', response)
def delete_work_item_type(self, process_id, wit_ref_name):
"""DeleteWorkItemType.
[Preview API] Removes a work itewm type in the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
self._send(http_method='DELETE',
location_id='1ce0acad-4638-49c3-969c-04aa65ba6bea',
version='4.1-preview.1',
route_values=route_values)
def get_work_item_type(self, process_id, wit_ref_name, expand=None):
"""GetWorkItemType.
[Preview API] Returns a work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str expand:
:rtype: :class:`<WorkItemTypeModel> <work-item-tracking.v4_1.models.WorkItemTypeModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='1ce0acad-4638-49c3-969c-04aa65ba6bea',
version='4.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('WorkItemTypeModel', response)
def get_work_item_types(self, process_id, expand=None):
"""GetWorkItemTypes.
[Preview API] Returns a list of all work item types in the process.
:param str process_id: The ID of the process
:param str expand:
:rtype: [WorkItemTypeModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
response = self._send(http_method='GET',
location_id='1ce0acad-4638-49c3-969c-04aa65ba6bea',
version='4.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[WorkItemTypeModel]', self._unwrap_collection(response))
def update_work_item_type(self, work_item_type_update, process_id, wit_ref_name):
"""UpdateWorkItemType.
[Preview API] Updates a work item type of the process.
:param :class:`<WorkItemTypeUpdateModel> <work-item-tracking.v4_1.models.WorkItemTypeUpdateModel>` work_item_type_update:
:param str process_id: The ID of the process
:param str wit_ref_name: The reference name of the work item type
:rtype: :class:`<WorkItemTypeModel> <work-item-tracking.v4_1.models.WorkItemTypeModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(work_item_type_update, 'WorkItemTypeUpdateModel')
response = self._send(http_method='PATCH',
location_id='1ce0acad-4638-49c3-969c-04aa65ba6bea',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeModel', response)
def add_field_to_work_item_type(self, field, process_id, wit_ref_name_for_fields):
"""AddFieldToWorkItemType.
[Preview API] Adds a field to the work item type in the process.
:param :class:`<WorkItemTypeFieldModel> <work-item-tracking.v4_1.models.WorkItemTypeFieldModel>` field:
:param str process_id: The ID of the process
:param str wit_ref_name_for_fields: Work item type reference name for the field
:rtype: :class:`<WorkItemTypeFieldModel> <work-item-tracking.v4_1.models.WorkItemTypeFieldModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_fields is not None:
route_values['witRefNameForFields'] = self._serialize.url('wit_ref_name_for_fields', wit_ref_name_for_fields, 'str')
content = self._serialize.body(field, 'WorkItemTypeFieldModel')
response = self._send(http_method='POST',
location_id='976713b4-a62e-499e-94dc-eeb869ea9126',
version='4.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('WorkItemTypeFieldModel', response)
def get_work_item_type_field(self, process_id, wit_ref_name_for_fields, field_ref_name):
"""GetWorkItemTypeField.
[Preview API] Returns a single field in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_fields: Work item type reference name for fields
:param str field_ref_name: The reference name of the field
:rtype: :class:`<WorkItemTypeFieldModel> <work-item-tracking.v4_1.models.WorkItemTypeFieldModel>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_fields is not None:
route_values['witRefNameForFields'] = self._serialize.url('wit_ref_name_for_fields', wit_ref_name_for_fields, 'str')
if field_ref_name is not None:
route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str')
response = self._send(http_method='GET',
location_id='976713b4-a62e-499e-94dc-eeb869ea9126',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('WorkItemTypeFieldModel', response)
def get_work_item_type_fields(self, process_id, wit_ref_name_for_fields):
"""GetWorkItemTypeFields.
[Preview API] Returns a list of all fields in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_fields: Work item type reference name for fields
:rtype: [WorkItemTypeFieldModel]
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_fields is not None:
route_values['witRefNameForFields'] = self._serialize.url('wit_ref_name_for_fields', wit_ref_name_for_fields, 'str')
response = self._send(http_method='GET',
location_id='976713b4-a62e-499e-94dc-eeb869ea9126',
version='4.1-preview.1',
route_values=route_values)
return self._deserialize('[WorkItemTypeFieldModel]', self._unwrap_collection(response))
def remove_field_from_work_item_type(self, process_id, wit_ref_name_for_fields, field_ref_name):
"""RemoveFieldFromWorkItemType.
[Preview API] Removes a field in the work item type of the process.
:param str process_id: The ID of the process
:param str wit_ref_name_for_fields: Work item type reference name for fields
:param str field_ref_name: The reference name of the field
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name_for_fields is not None:
route_values['witRefNameForFields'] = self._serialize.url('wit_ref_name_for_fields', wit_ref_name_for_fields, 'str')
if field_ref_name is not None:
route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str')
self._send(http_method='DELETE',
location_id='976713b4-a62e-499e-94dc-eeb869ea9126',
version='4.1-preview.1',
route_values=route_values)
| 56.04668
| 141
| 0.624461
| 6,607
| 54,029
| 4.857121
| 0.045255
| 0.080895
| 0.0483
| 0.04668
| 0.894394
| 0.875105
| 0.856782
| 0.84787
| 0.82238
| 0.806363
| 0
| 0.030441
| 0.271595
| 54,029
| 963
| 142
| 56.104881
| 0.784957
| 0.009976
| 0
| 0.87037
| 0
| 0
| 0.165637
| 0.064451
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.005051
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4724d80261859f7757d95177a746199625116d32
| 21,176
|
py
|
Python
|
core.py
|
LAION-AI/crawlingathome
|
43a477777fb403046d67224747cde1dac9f2094a
|
[
"MIT"
] | 11
|
2021-06-02T03:46:52.000Z
|
2021-09-11T22:19:12.000Z
|
core.py
|
LAION-AI/crawlingathome
|
43a477777fb403046d67224747cde1dac9f2094a
|
[
"MIT"
] | 9
|
2021-06-14T07:46:20.000Z
|
2021-08-28T22:50:46.000Z
|
core.py
|
LAION-AI/crawlingathome
|
43a477777fb403046d67224747cde1dac9f2094a
|
[
"MIT"
] | 7
|
2021-06-01T11:59:36.000Z
|
2022-03-20T13:44:18.000Z
|
##############################
# Crawling@Home Client #
# (c) Theo Coombes, 2021 #
# TheoCoombes/crawlingathome #
##############################
from requests import session, Response
from typing import Optional, Union
from time import sleep
import numpy as np
import logging
import tarfile
import shutil
import gzip
import os
from .errors import *
# Route all module output through logging with a "[HH:MM crawling@home]" prefix.
logging.basicConfig(format="[%(asctime)s crawling@home] %(message)s", datefmt="%H:%M", level=logging.INFO)
# Keep a handle to the real builtin before it is shadowed below; used where
# raw stdout output (e.g. blank lines) is wanted.
_builtin_print = print
def print(message) -> None:
    # Deliberately shadows the builtin: every print() call in this module
    # emits a timestamped log record instead of writing directly to stdout.
    logging.info(message)
def _safe_request(function, *args, **kwargs) -> Response:
try:
return function(*args, **kwargs)
except Exception as e:
print(f"retrying request after {e} error...")
sleep(60)
return _safe_request(function, *args, **kwargs)
def _handle_exceptions(status_code: int, text: str) -> Optional[Exception]:
if status_code == 200:
return None
elif status_code == 400:
return ValueError(f"[crawling@home] {text} (status {status_code})")
elif status_code == 403:
return ZeroJobError(f"[crawling@home] {text} (status {status_code})")
elif status_code == 404:
return WorkerTimedOutError(f"[crawling@home] {text} (status {status_code})")
else:
return ServerError(f"[crawling@home] {text} (status {status_code})")
# The main 'hybrid' client instance.
class HybridClient:
    """Worker client of type "HYBRID" for the crawling@home tracker.

    Registers the worker with the server on construction, then drives the
    job lifecycle: ``newJob`` -> ``downloadShard`` -> ``completeJob``, with
    ``log`` / ``isAlive`` / ``bye`` for progress reporting and teardown.
    """
    def __init__(self, url, nickname, _recycled=False) -> None:
        """Connect to the server at *url* and register a new worker.

        :param url: base server URL (a trailing "/" is appended if missing)
        :param nickname: nickname reported to the server
        :param _recycled: internal — when True the handshake is skipped and
            the recycler module restores attributes onto the bare instance.
        """
        if _recycled:
            return
        if url[-1] != "/":
            url += "/"
        self.s = session()
        self.url = url
        self.type = "HYBRID"
        self.nickname = nickname
        print("connecting to crawling@home server...")
        payload = {"nickname": nickname, "type": "HYBRID"}
        r = _safe_request(self.s.get, self.url + "api/new", params=payload)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        print("connected to crawling@home server")
        data = r.json()
        self.token = data["token"]
        self.display_name = data["display_name"]
        self.upload_address = data["upload_address"]
        print(f"worker name: {self.display_name}")
        _builtin_print("\n\n")
        print(f"You can view this worker's progress at {self.url + 'worker/hybrid/' + self.display_name}\n")
    # Refreshes self.upload_address from the server.
    def updateUploadServer(self) -> None:
        """Fetch a fresh upload server address and store it on the instance."""
        r = _safe_request(self.s.get, self.url + "api/getUploadAddress", params={"type": "HYBRID"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        self.upload_address = r.text
        print("updated upload server address")
    # Asks the server how many jobs remain, returning an integer.
    def jobCount(self) -> int:
        """Return the number of jobs still available on the server."""
        r = _safe_request(self.s.get, self.url + "api/jobCount", params={"type": "HYBRID"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        count = int(r.text)
        print(f"jobs remaining: {count}")
        return count
    # Makes the node send a request to the server, asking for a new job.
    def newJob(self) -> None:
        """Request a new job and store its shard metadata on the instance.

        Sets ``self.shard`` (download URL), ``self.start_id`` / ``self.end_id``
        (np.int64 bounds) and ``self.shard_piece``.
        """
        print("looking for new job...")
        r = _safe_request(self.s.post, self.url + "api/newJob", json={"token": self.token, "type": "HYBRID"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            data = r.json()
            self.shard = data["url"]
            self.start_id = np.int64(data["start_id"])
            self.end_id = np.int64(data["end_id"])
            self.shard_piece = data["shard"]
            # NOTE(review): "recieved" is a typo, but fixing it would change
            # worker-visible log output.
            print("recieved new job")
    # Downloads the current job's shard to the current directory (./shard.wat)
    def downloadShard(self, path="") -> None:
        """Stream-download the gzip shard and decompress to ``<path>shard.wat``."""
        print("downloading shard...")
        self.log("Downloading shard", noprint=True)
        with self.s.get(self.shard, stream=True) as r:
            r.raise_for_status()
            # Stream to a temp file in 8 KiB chunks to bound memory use.
            with open(path + "temp.gz", 'w+b') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        with gzip.open(path + 'temp.gz', 'rb') as f_in:
            with open(path + 'shard.wat', 'w+b') as f_out:
                shutil.copyfileobj(f_in, f_out)
        sleep(1) # Causes errors otherwise?
        os.remove(path + "temp.gz")
        self.log("Downloaded shard", noprint=True)
        print("finished downloading shard")
    # Marks a job as completed/done.
    def completeJob(self, total_scraped : int) -> None:
        """Report the current job finished with *total_scraped* items."""
        r = _safe_request(self.s.post, self.url + "api/markAsDone", json={"token": self.token, "count": total_scraped, "type": "HYBRID"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        print("marked job as done")
    # Wrapper for `completeJob` (for older workers)
    def _markjobasdone(self, total_scraped : int) -> None:
        """Deprecated alias of :meth:`completeJob`."""
        print("WARNING: avoid using `_markjobasdone(...)` and instead use `completeJob(...)` to mark a job as done.")
        self.completeJob(total_scraped)
    # Logs the string progress into the server.
    def log(self, progress : str, crashed=False, noprint=False) -> None:
        """Send a progress string to the server.

        :param progress: progress text shown on the worker's status page
        :param crashed: internal guard — prevents recursive crash reporting
        :param noprint: suppress the local confirmation message
        """
        data = {"token": self.token, "progress": progress, "type": "HYBRID"}
        r = _safe_request(self.s.post, self.url + "api/updateProgress", json=data)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc and not crashed:
            self.log("Crashed", crashed=True)
            raise exc
        if not crashed and not noprint:
            print(f"logged new progress data: {progress}")
    # Client wrapper for `recycler.dump`.
    def dump(self) -> dict:
        """Serialize this client's state via the recycler module."""
        from .recycler import dump as _dump
        return _dump(self)
    def recreate(self) -> None:
        """Re-register with the server and adopt the new worker identity.

        NOTE(review): the previous session/token is not explicitly closed
        or deregistered here — presumably the server expires it; confirm.
        """
        print("recreating client instance...")
        new = HybridClient(self.url, self.nickname)
        self.token = new.token
        self.display_name = new.display_name
        self.upload_address = new.upload_address
    # Returns True if the worker is still alive, otherwise returns False.
    def isAlive(self) -> bool:
        """Return whether the server still considers this worker registered."""
        r = _safe_request(self.s.post, self.url + "api/validateWorker", json={"token": self.token, "type": "HYBRID"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            return ("True" in r.text)
    # Removes the node instance from the server, ending all current jobs.
    def bye(self) -> None:
        """Deregister this worker from the server, ending its jobs."""
        _safe_request(self.s.post, self.url + "api/bye", json={"token": self.token, "type": "HYBRID"})
        print("closed worker")
# The CPU client instance.
# Programatically similar to `HybridClient`, with different completion functions.
class CPUClient:
    """Tracker client for CPU workers.

    A CPU worker registers with the crawling@home tracker, pulls WAT-shard
    jobs, downloads and decompresses the shard, and reports the URL of its
    processed output (via `completeJob`) so GPU workers can pick it up.
    """

    def __init__(self, url, nickname, _recycled=False) -> None:
        """Register a new CPU worker with the tracker at `url`.

        `_recycled=True` returns an empty shell so the `recycler` module can
        rebuild a previously dumped client field-by-field.
        """
        if _recycled:
            return
        if url[-1] != "/":
            url += "/"
        self.s = session()
        self.url = url
        self.type = "CPU"
        self.nickname = nickname
        print("connecting to crawling@home server...")
        payload = {"nickname": nickname, "type": "CPU"}
        r = _safe_request(self.s.get, self.url + "api/new", params=payload)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            # Bug fix: previously this called `self.log(...)`, but
            # `self.token` is only assigned below, so a failed registration
            # crashed with AttributeError instead of raising the real error.
            raise exc
        print("connected to crawling@home server")
        data = r.json()
        self.token = data["token"]
        self.display_name = data["display_name"]
        self.upload_address = data["upload_address"]
        print(f"worker name: {self.display_name}")
        _builtin_print("\n\n")
        print(f"You can view this worker's progress at {self.url + 'worker/cpu/' + self.display_name}\n")

    # Refreshes this client's upload server address from the tracker.
    # (The previous comment here was a copy-paste of `jobCount`'s.)
    def updateUploadServer(self) -> None:
        r = _safe_request(self.s.get, self.url + "api/getUploadAddress", params={"type": "CPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        self.upload_address = r.text
        print("updated upload server address")

    # Finds the amount of available jobs from the server, returning an integer.
    def jobCount(self) -> int:
        r = _safe_request(self.s.get, self.url + "api/jobCount", params={"type": "CPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        count = int(r.text)
        print(f"jobs remaining: {count}")
        return count

    # Makes the node send a request to the server, asking for a new job.
    def newJob(self) -> None:
        """Request a job; on success sets `shard`, `start_id`, `end_id`, `shard_piece`."""
        print("looking for new job...")
        r = _safe_request(self.s.post, self.url + "api/newJob", json={"token": self.token, "type": "CPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            data = r.json()
            self.shard = data["url"]
            self.start_id = np.int64(data["start_id"])
            self.end_id = np.int64(data["end_id"])
            self.shard_piece = data["shard"]
            print("recieved new job")

    # Downloads the current job's shard to the current directory (./shard.wat)
    def downloadShard(self, path="") -> None:
        """Stream the gzipped shard to `temp.gz`, decompress to `shard.wat`, clean up."""
        print("downloading shard...")
        self.log("Downloading shard", noprint=True)
        with self.s.get(self.shard, stream=True) as r:
            r.raise_for_status()
            with open(path + "temp.gz", 'w+b') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        with gzip.open(path + 'temp.gz', 'rb') as f_in:
            with open(path + 'shard.wat', 'w+b') as f_out:
                shutil.copyfileobj(f_in, f_out)
        sleep(1)  # Causes errors otherwise?
        os.remove(path + "temp.gz")
        self.log("Downloaded shard", noprint=True)
        print("finished downloading shard")

    # Uploads the image download URL for the GPU workers to use, marking the CPU job complete.
    def completeJob(self, image_download_url : str) -> None:
        r = _safe_request(self.s.post, self.url + "api/markAsDone", json={
            "token": self.token,
            "url": image_download_url,
            "type": "CPU"
        })
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        print("marked job as done")

    # Logs the string progress into the server.
    def log(self, progress : str, crashed=False, noprint=False) -> None:
        """Post a progress string; `crashed=True` suppresses recursion and printing."""
        data = {"token": self.token, "progress": progress, "type": "CPU"}
        r = _safe_request(self.s.post, self.url + "api/updateProgress", json=data)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc and not crashed:
            self.log("Crashed", crashed=True)
            raise exc
        if not crashed and not noprint:
            print(f"logged new progress data: {progress}")

    # Client wrapper for `recycler.dump`.
    def dump(self) -> dict:
        from .recycler import dump as _dump
        return _dump(self)

    # Recreates the client with the server, giving the client a new auth token, upload server and display name.
    def recreate(self) -> None:
        print("recreating client instance...")
        new = CPUClient(self.url, self.nickname)
        self.token = new.token
        self.display_name = new.display_name
        self.upload_address = new.upload_address

    # Returns True if the worker is still alive, otherwise returns False.
    def isAlive(self) -> bool:
        r = _safe_request(self.s.post, self.url + "api/validateWorker", json={"token": self.token, "type": "CPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            return ("True" in r.text)

    # Removes the node instance from the server, ending all current jobs.
    def bye(self) -> None:
        _safe_request(self.s.post, self.url + "api/bye", json={"token": self.token, "type": "CPU"})
        print("closed worker")
# The GPU client instance.
class GPUClient:
    """Tracker client for GPU workers.

    A GPU worker downloads the image archives a CPU worker produced (over
    HTTP or rsync), processes them, and reports the count via `completeJob`.
    """

    def __init__(self, url, nickname, _recycled=False) -> None:
        """Register a new GPU worker with the tracker at `url`.

        `_recycled=True` returns an empty shell so the `recycler` module can
        rebuild a previously dumped client field-by-field.
        """
        if _recycled:
            return
        if url[-1] != "/":
            url += "/"
        self.s = session()
        self.url = url
        self.type = "GPU"
        self.nickname = nickname
        print("connecting to crawling@home server...")
        payload = {"nickname": nickname, "type": "GPU"}
        r = _safe_request(self.s.get, self.url + "api/new", params=payload)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            # Bug fix: previously this called `self.log(...)`, but
            # `self.token` is only assigned below, so a failed registration
            # crashed with AttributeError instead of raising the real error.
            raise exc
        print("connected to crawling@home server")
        data = r.json()
        self.token = data["token"]
        self.display_name = data["display_name"]
        self.upload_address = data["upload_address"]
        print(f"worker name: {self.display_name}")
        _builtin_print("\n\n")
        print(f"You can view this worker's progress at {self.url + 'worker/gpu/' + self.display_name}\n")

    # Refreshes this client's upload server address from the tracker.
    # (The previous comment here was a copy-paste of `jobCount`'s.)
    def updateUploadServer(self) -> None:
        r = _safe_request(self.s.get, self.url + "api/getUploadAddress", params={"type": "GPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        self.upload_address = r.text
        print("updated upload server address")

    # Finds the amount of available jobs from the server, returning an integer.
    def jobCount(self) -> int:
        r = _safe_request(self.s.get, self.url + "api/jobCount", params={"type": "GPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        count = int(r.text)
        print(f"GPU jobs remaining: {count}")
        return count

    # Makes the node send a request to the server, asking for a new job.
    def newJob(self) -> None:
        """Request a job; on success sets `shard`, `start_id`, `end_id`, `shard_piece`."""
        print("looking for new job...")
        r = _safe_request(self.s.post, self.url + "api/newJob", json={"token": self.token, "type": "GPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            data = r.json()
            self.shard = data["url"]
            self.start_id = np.int64(data["start_id"])
            self.end_id = np.int64(data["end_id"])
            self.shard_piece = data["shard"]
            print("recieved new job")

    # Flags a GPU job's URL as invalid to the server.
    def invalidURL(self) -> None:
        """Report the job's URL as broken, then raise InvalidURLError (always raises)."""
        r = _safe_request(self.s.post, self.url + "api/gpuInvalidDownload", json={"token": self.token, "type": "GPU"})
        if r.status_code != 200:
            print("something went wrong when flagging a URL as invalid - not raising error.")
        else:
            print("successfully flagged url as invalid")
        raise InvalidURLError('[crawling@home] Invalid URL')

    # Downloads the CPU worker's processed images to the ./images/ (`path`) directory
    def downloadShard(self, path="") -> None:
        """Fetch the CPU worker's output: HTTP-gzip shards or rsync tarballs."""
        print("downloading shard...")
        self.log("Downloading shard", noprint=True)
        if self.shard.startswith('http'):
            with self.s.get(self.shard, stream=True) as r:
                r.raise_for_status()
                with open(path + "temp.gz", 'w+b') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
            with gzip.open(path + 'temp.gz', 'rb') as f_in:
                with open(path + 'shard.wat', 'w+b') as f_out:
                    shutil.copyfileobj(f_in, f_out)
            sleep(1)  # Causes errors otherwise?
            os.remove(path + "temp.gz")
        elif self.shard.startswith('rsync'):
            uid = self.shard.split('rsync', 1)[-1].strip()
            resp = 1
            for _ in range(5):
                resp = os.system(f'rsync -av archiveteam@5.9.55.230::gpujobs/{uid}.tar.gz {uid}.tar.gz')
                # os.system returns the wait status; 5888 == 23 << 8, i.e.
                # rsync exit code 23 ("partial transfer") — presumably the
                # missing-file case here; TODO confirm against the server.
                if resp == 5888:
                    print('[crawling@home] rsync job not found')
                    self.invalidURL()  # always raises InvalidURLError
                if resp == 0:
                    # NOTE(security): extractall on a downloaded tarball can
                    # write outside the cwd on a malicious archive; the data
                    # source is project-controlled, so this is flagged only.
                    with tarfile.open(f"{uid}.tar.gz", "r:gz") as tar:
                        tar.extractall()
                    break
            else:
                # All 5 rsync attempts failed with a nonzero, non-5888 status.
                self.invalidURL()
        self.log("Downloaded shard", noprint=True)
        print("finished downloading shard")

    # Reports the number of scraped pairs, marking the GPU job complete.
    # (The previous comment here was a copy-paste of the CPU client's.)
    def completeJob(self, total_scraped : int) -> None:
        r = _safe_request(self.s.post, self.url + "api/markAsDone", json={"token": self.token, "count": total_scraped, "type": "GPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        print("marked job as done")

    # Logs the string progress into the server.
    def log(self, progress : str, crashed=False, noprint=False) -> None:
        """Post a progress string; `crashed=True` suppresses recursion and printing."""
        data = {"token": self.token, "progress": progress, "type": "GPU"}
        r = _safe_request(self.s.post, self.url + "api/updateProgress", json=data)
        exc = _handle_exceptions(r.status_code, r.text)
        if exc and not crashed:
            self.log("Crashed", crashed=True)
            raise exc
        if not crashed and not noprint:
            print(f"logged new progress data: {progress}")

    # Client wrapper for `recycler.dump`.
    def dump(self) -> dict:
        from .recycler import dump as _dump
        return _dump(self)

    # Recreates the client with the server, giving the client a new auth token, upload server and display name.
    def recreate(self) -> None:
        print("recreating client instance...")
        new = GPUClient(self.url, self.nickname)
        self.token = new.token
        self.display_name = new.display_name
        self.upload_address = new.upload_address

    # Returns True if the worker is still alive, otherwise returns False.
    def isAlive(self) -> bool:
        r = _safe_request(self.s.post, self.url + "api/validateWorker", json={"token": self.token, "type": "GPU"})
        exc = _handle_exceptions(r.status_code, r.text)
        if exc:
            self.log("Crashed", crashed=True)
            raise exc
        else:
            return ("True" in r.text)

    # Removes the node instance from the server, ending all current jobs.
    def bye(self) -> None:
        _safe_request(self.s.post, self.url + "api/bye", json={"token": self.token, "type": "GPU"})
        print("closed worker")
# Creates and returns a new client instance.
def init(url="http://crawlingathome.duckdns.org/", nickname="anonymous", type="Hybrid") -> Optional[Union[HybridClient, CPUClient, GPUClient]]:
    """Create and return a worker client of the requested type.

    Args:
        url: Tracker server URL.
        nickname: Display nickname for the worker.
        type: A client class, or a string matched case-insensitively on its
            first letter ("h"ybrid / "c"pu / "g"pu).

    Raises:
        ValueError: if `type` names no known worker type (including "").
    """
    selector = type
    if isinstance(selector, str):
        # Bug fix: an empty string used to raise IndexError from
        # `type.lower()[0]` instead of the intended ValueError below.
        if not selector:
            raise ValueError(f"[crawling@home] invalid worker `{type}`")
        selector = selector.lower()[0]
    if selector == "h" or selector == HybridClient:
        return HybridClient(url, nickname)
    elif selector == "c" or selector == CPUClient:
        return CPUClient(url, nickname)
    elif selector == "g" or selector == GPUClient:
        return GPUClient(url, nickname)
    else:
        raise ValueError(f"[crawling@home] invalid worker `{type}`")
| 35.589916
| 144
| 0.55851
| 2,544
| 21,176
| 4.55228
| 0.112028
| 0.022364
| 0.032381
| 0.034539
| 0.816251
| 0.809257
| 0.806062
| 0.797945
| 0.797945
| 0.793541
| 0
| 0.004717
| 0.319182
| 21,176
| 594
| 145
| 35.649832
| 0.798571
| 0.110691
| 0
| 0.735369
| 0
| 0.010178
| 0.178002
| 0.004799
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099237
| false
| 0
| 0.033079
| 0
| 0.195929
| 0.16285
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b292cea70a8b344dea0f7d2e4ea767eb654eaa6
| 10,147
|
py
|
Python
|
corehq/ex-submodules/casexml/apps/phone/tests/test_index_tree.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | null | null | null |
corehq/ex-submodules/casexml/apps/phone/tests/test_index_tree.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | 1
|
2022-03-12T01:03:25.000Z
|
2022-03-12T01:03:25.000Z
|
corehq/ex-submodules/casexml/apps/phone/tests/test_index_tree.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | null | null | null |
from django.test import SimpleTestCase
from casexml.apps.phone.models import IndexTree, SimplifiedSyncLog
class TestExtendedFootprint(SimpleTestCase):
    """IndexTree.get_all_cases_that_depend_on_case over small trees."""

    def test_simple_linear_structure(self):
        """child -> parent -> grandparent: the whole chain depends on the root."""
        all_cases = ['grandparent', 'parent', 'child']
        grandparent_id, parent_id, child_id = all_cases
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([parent_id]),
            parent_id: convert_list_to_dict([grandparent_id]),
        })
        dependent = tree.get_all_cases_that_depend_on_case(grandparent_id)
        self.assertEqual(dependent, set(all_cases))

    def test_multiple_children(self):
        """Two siblings indexing one parent are both pulled in from the root."""
        all_cases = ['rickard', 'ned', 'bran', 'arya']
        grandparent_id, parent_id, first_child, second_child = all_cases
        tree = IndexTree(indices={
            first_child: convert_list_to_dict([parent_id]),
            second_child: convert_list_to_dict([parent_id]),
            parent_id: convert_list_to_dict([grandparent_id]),
        })
        dependent = tree.get_all_cases_that_depend_on_case(grandparent_id)
        self.assertEqual(dependent, set(all_cases))
class PruningTest(SimpleTestCase):
    """SimplifiedSyncLog.prune_case: a case stays on the phone (moved to the
    dependent set) while any live case still indexes it, and is removed — along
    with its now-unneeded ancestors — once nothing depends on it."""

    def test_prune_parent_then_child(self):
        """Pruning a still-indexed parent defers removal until the child goes."""
        [parent_id, child_id] = all_ids = ['parent', 'child']
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([parent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # this has no effect
        sync_log.prune_case(parent_id)
        self.assertTrue(child_id in sync_log.case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.case_ids_on_phone)
        self.assertFalse(child_id in sync_log.dependent_case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.dependent_case_ids_on_phone)
        # this should prune it entirely
        sync_log.prune_case(child_id)
        self.assertFalse(child_id in sync_log.case_ids_on_phone)
        self.assertFalse(parent_id in sync_log.case_ids_on_phone)

    def test_prune_child_then_parent(self):
        """Pruning the leaf first removes it immediately; the parent follows."""
        [parent_id, child_id] = all_ids = ['parent', 'child']
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([parent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # this should prune the child but not the parent
        sync_log.prune_case(child_id)
        self.assertFalse(child_id in sync_log.case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.case_ids_on_phone)
        self.assertFalse(child_id in sync_log.dependent_case_ids_on_phone)
        self.assertFalse(parent_id in sync_log.dependent_case_ids_on_phone)
        # then pruning the parent should prune it
        sync_log.prune_case(parent_id)
        self.assertFalse(parent_id in sync_log.case_ids_on_phone)
        self.assertFalse(parent_id in sync_log.dependent_case_ids_on_phone)

    def test_prune_tiered_top_down(self):
        """Root-first pruning of a 3-level chain only takes effect at the leaf."""
        [grandparent_id, parent_id, child_id] = all_ids = ['grandparent', 'parent', 'child']
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([parent_id]),
            parent_id: convert_list_to_dict([grandparent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # this has no effect other than to move the grandparent to dependent
        sync_log.prune_case(grandparent_id)
        for id in all_ids:
            self.assertTrue(id in sync_log.case_ids_on_phone)
        self.assertTrue(grandparent_id in sync_log.dependent_case_ids_on_phone)
        self.assertFalse(parent_id in sync_log.dependent_case_ids_on_phone)
        self.assertFalse(child_id in sync_log.dependent_case_ids_on_phone)
        # likewise, this should have no effect other than to move the parent to dependent
        sync_log.prune_case(parent_id)
        for id in all_ids:
            self.assertTrue(id in sync_log.case_ids_on_phone)
        self.assertTrue(grandparent_id in sync_log.dependent_case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.dependent_case_ids_on_phone)
        self.assertFalse(child_id in sync_log.dependent_case_ids_on_phone)
        # this should now prune everything
        sync_log.prune_case(child_id)
        for id in all_ids:
            self.assertFalse(id in sync_log.case_ids_on_phone)
            self.assertFalse(id in sync_log.dependent_case_ids_on_phone)

    def test_prune_tiered_bottom_up(self):
        """Leaf-first pruning removes each level immediately."""
        [grandparent_id, parent_id, child_id] = all_ids = ['grandparent', 'parent', 'child']
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([parent_id]),
            parent_id: convert_list_to_dict([grandparent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # just pruning the child should prune just the child
        sync_log.prune_case(child_id)
        self.assertTrue(grandparent_id in sync_log.case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.case_ids_on_phone)
        self.assertFalse(child_id in sync_log.case_ids_on_phone)
        # same for the parent
        sync_log.prune_case(parent_id)
        self.assertTrue(grandparent_id in sync_log.case_ids_on_phone)
        self.assertFalse(parent_id in sync_log.case_ids_on_phone)
        # same for the grandparent
        sync_log.prune_case(grandparent_id)
        self.assertFalse(grandparent_id in sync_log.case_ids_on_phone)

    def test_prune_multiple_children(self):
        """A parent survives until its last remaining child is pruned."""
        [grandparent_id, parent_id, child_id_1, child_id_2] = all_ids = ['rickard', 'ned', 'bran', 'arya']
        tree = IndexTree(indices={
            child_id_1: convert_list_to_dict([parent_id]),
            child_id_2: convert_list_to_dict([parent_id]),
            parent_id: convert_list_to_dict([grandparent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # first prune the parent and grandparent
        sync_log.prune_case(grandparent_id)
        sync_log.prune_case(parent_id)
        self.assertTrue(grandparent_id in sync_log.case_ids_on_phone)
        self.assertTrue(grandparent_id in sync_log.dependent_case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.dependent_case_ids_on_phone)
        # just pruning one child should preserve the parent index
        sync_log.prune_case(child_id_1)
        self.assertTrue(grandparent_id in sync_log.case_ids_on_phone)
        self.assertTrue(grandparent_id in sync_log.dependent_case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.case_ids_on_phone)
        self.assertTrue(parent_id in sync_log.dependent_case_ids_on_phone)
        self.assertFalse(child_id_1 in sync_log.case_ids_on_phone)
        # pruning the other one should wipe it
        sync_log.prune_case(child_id_2)
        for id in all_ids:
            self.assertFalse(id in sync_log.case_ids_on_phone)
            self.assertFalse(id in sync_log.dependent_case_ids_on_phone)

    def test_prune_multiple_parents(self):
        """A child with two parents keeps the whole diamond alive on its own."""
        [grandparent_id, mother_id, father_id, child_id] = all_ids = ['heart-tree', 'catelyn', 'ned', 'arya']
        tree = IndexTree(indices={
            child_id: convert_list_to_dict([mother_id, father_id]),
            mother_id: convert_list_to_dict([grandparent_id]),
            father_id: convert_list_to_dict([grandparent_id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # first prune everything but the child
        sync_log.prune_case(grandparent_id)
        sync_log.prune_case(mother_id)
        sync_log.prune_case(father_id)
        # everything should still be relevant because of the child
        for id in all_ids:
            self.assertTrue(id in sync_log.case_ids_on_phone)
        # pruning the child should wipe everything else
        sync_log.prune_case(child_id)
        for id in all_ids:
            self.assertFalse(id in sync_log.case_ids_on_phone)
            self.assertFalse(id in sync_log.dependent_case_ids_on_phone)

    def test_prune_circular_loops(self):
        """Two cases indexing each other are only removed once both are pruned."""
        [peer_id_1, peer_id_2] = all_ids = ['jaime', 'cersei']
        tree = IndexTree(indices={
            peer_id_1: convert_list_to_dict([peer_id_2]),
            peer_id_2: convert_list_to_dict([peer_id_1]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # pruning one peer should keep everything around
        sync_log.prune_case(peer_id_1)
        for id in all_ids:
            self.assertTrue(id in sync_log.case_ids_on_phone)
        # pruning the second peer should remove everything
        sync_log.prune_case(peer_id_2)
        for id in all_ids:
            self.assertFalse(id in sync_log.case_ids_on_phone)

    def test_prune_very_circular_loops(self):
        """A 3-cycle survives until every member of the cycle is pruned."""
        [peer_id_1, peer_id_2, peer_id_3] = all_ids = ['drogon', 'rhaegal', 'viserion']
        tree = IndexTree(indices={
            peer_id_1: convert_list_to_dict([peer_id_2]),
            peer_id_2: convert_list_to_dict([peer_id_3]),
            peer_id_3: convert_list_to_dict([peer_id_1]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))
        # pruning the first two, should still keep everything around
        sync_log.prune_case(peer_id_1)
        sync_log.prune_case(peer_id_2)
        for id in all_ids:
            self.assertTrue(id in sync_log.case_ids_on_phone)
        sync_log.prune_case(peer_id_3)
        for id in all_ids:
            self.assertFalse(id in sync_log.case_ids_on_phone)

    def test_prune_self_indexing(self):
        """A case that indexes itself must not keep itself alive."""
        [id] = ['recursive']
        tree = IndexTree(indices={
            id: convert_list_to_dict([id]),
        })
        sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set([id]))
        sync_log.prune_case(id)
        self.assertFalse(id in sync_log.case_ids_on_phone)
        self.assertFalse(id in sync_log.dependent_case_ids_on_phone)
def convert_list_to_dict(a_list):
    """Key each item by its stringified position: ['a', 'b'] -> {'0': 'a', '1': 'b'}."""
    positions = map(str, range(len(a_list)))
    return dict(zip(positions, a_list))
| 44.89823
| 109
| 0.699517
| 1,483
| 10,147
| 4.367498
| 0.082266
| 0.087541
| 0.079203
| 0.123205
| 0.85271
| 0.83727
| 0.806237
| 0.787556
| 0.77505
| 0.751891
| 0
| 0.003917
| 0.219966
| 10,147
| 225
| 110
| 45.097778
| 0.814403
| 0.083177
| 0
| 0.755952
| 0
| 0
| 0.02036
| 0
| 0
| 0
| 0
| 0
| 0.297619
| 1
| 0.071429
| false
| 0
| 0.011905
| 0.005952
| 0.10119
| 0.005952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b580064681024b8a5b753cb9d4854bcd5ecbe27
| 4,943
|
py
|
Python
|
test_driven_ranges_test.py
|
clean-code-craft-tcq-2/tdd-buckets-Sathyapriyan-Kannan
|
f83e186e5f3ab073228368cea5a7dfee7642aa6d
|
[
"MIT"
] | null | null | null |
test_driven_ranges_test.py
|
clean-code-craft-tcq-2/tdd-buckets-Sathyapriyan-Kannan
|
f83e186e5f3ab073228368cea5a7dfee7642aa6d
|
[
"MIT"
] | 1
|
2022-03-21T04:11:31.000Z
|
2022-03-21T04:11:31.000Z
|
test_driven_ranges_test.py
|
clean-code-craft-tcq-2/tdd-buckets-Sathyapriyan-Kannan
|
f83e186e5f3ab073228368cea5a7dfee7642aa6d
|
[
"MIT"
] | null | null | null |
import unittest
import test_driven_ranges
class TestDrivenRangesTest(unittest.TestCase):
    """Tests for `test_driven_ranges`: continuous-range detection over sorted
    readings and A2D (analog-to-digital) count-to-amp conversion.

    Conventions visible from the assertions below: ranges print as
    "start-end, count"; an empty input yields the string 'INVALID_INPUTS'.
    """

    def test_valid_input(self):
        """A non-empty reading list is valid; an empty one is not."""
        self.assertTrue(test_driven_ranges.is_valid_input([1, 2]))
        self.assertFalse(test_driven_ranges.is_valid_input([]))

    def test_get_range(self):
        """Consecutive values collapse into 'lo-hi, n' strings; singletons drop out."""
        self.assertEqual(test_driven_ranges.get_continuous_ranges([3, 3, 5, 4, 10, 11, 12]), ['3-5, 4', '10-12, 3'])
        self.assertEqual(test_driven_ranges.get_continuous_ranges([]), 'INVALID_INPUTS')
        self.assertEqual(test_driven_ranges.get_continuous_ranges([3]), [])
        self.assertEqual(test_driven_ranges.get_continuous_ranges([3, 4]), ['3-4, 2'])

    def test_get_most_frequent_reading(self):
        """Mode of the readings; empty input is flagged invalid."""
        self.assertEqual(test_driven_ranges.get_most_frequent_reading([3, 4, 5, 7, 7, 7, 7, 20, 21, 22]), 7)
        self.assertEqual(test_driven_ranges.get_most_frequent_reading([]), 'INVALID_INPUTS')

    def test_get_threshold(self):
        # Unsigned n-bit threshold is 2**n - 1; signed is 2**(n-1) - 1.
        self.assertEqual(test_driven_ranges.get_threshold(12, is_signed=False), 4095)
        self.assertEqual(test_driven_ranges.get_threshold(12, is_signed=True), 2047)
        self.assertEqual(test_driven_ranges.get_threshold(10, is_signed=False), 1023)
        self.assertEqual(test_driven_ranges.get_threshold(10, is_signed=True), 511)

    def test_convert_a2d_to_amp(self):
        """Scale a raw A2D count to amps given the bit threshold and full-scale
        current (semantics of the signed mapping inferred from these cases —
        confirm against `test_driven_ranges.convert_a2d_to_amp` itself)."""
        self.assertEqual(
            test_driven_ranges.convert_a2d_to_amp(1048, test_driven_ranges.get_threshold(12, is_signed=False), 10,
                                                  is_signed=False), 3)
        self.assertEqual(
            test_driven_ranges.convert_a2d_to_amp(4094, test_driven_ranges.get_threshold(12, is_signed=False), 10,
                                                  is_signed=False), 10)
        self.assertEqual(
            test_driven_ranges.convert_a2d_to_amp(0, test_driven_ranges.get_threshold(10, is_signed=True), 15,
                                                  is_signed=True), 15)
        self.assertEqual(
            test_driven_ranges.convert_a2d_to_amp(1023, test_driven_ranges.get_threshold(10, is_signed=True), 15,
                                                  is_signed=True), 15)
        self.assertEqual(
            test_driven_ranges.convert_a2d_to_amp(550, test_driven_ranges.get_threshold(10, is_signed=True), 15,
                                                  is_signed=True), 1)

    def test_remove_error_readings(self):
        # Readings at the bit-width's max raw value (e.g. 4095 for 12-bit) are
        # treated as errors and dropped; for 10 bits anything >= 1023 goes.
        self.assertEqual(test_driven_ranges.remove_error_readings([1000, 1005, 1200, 1494, 4094, 4095], 12),
                         [1000, 1005, 1200, 1494, 4094])
        self.assertEqual(test_driven_ranges.remove_error_readings([1000, 1005, 1200, 1494, 4094, 4095], 10),
                         [1000, 1005])

    def test_convert_a2d_readings_into_current(self):
        """Pipeline: drop error readings, then convert each survivor to amps."""
        self.assertEqual(test_driven_ranges.convert_a2d_readings_into_current(
            [1000, 1005, 1200, 1494, 4094, 4095], 12, 10, is_signed=False), [2, 2, 3, 4, 10])
        self.assertEqual(test_driven_ranges.convert_a2d_readings_into_current(
            [1001, 1006, 1201, 1495, 4094, 4095], 10, 15, is_signed=True), [14, 15])
        self.assertEqual(test_driven_ranges.convert_a2d_readings_into_current(
            [1150, 1200, 1225, 1494], 12, 10, is_signed=False), [3, 3, 3, 4])

    def test_get_continuous_ranges_from_a2d_sensor(self):
        """End-to-end: raw readings -> currents -> 'lo-hi, n' range strings."""
        self.assertEqual(test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
            [], 12, 10, is_signed=False), 'INVALID_INPUTS')
        self.assertEqual(test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
            [], 12, 10, is_signed=True), 'INVALID_INPUTS')
        self.assertEqual(test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
            [], 10, 15, is_signed=False), 'INVALID_INPUTS')
        self.assertEqual(test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
            [], 10, 15, is_signed=True), 'INVALID_INPUTS')
        self.assertEqual(
            test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
                [1000, 1005, 1200, 1494, 4094, 4095], 12, 10, is_signed=False), ['2-4, 4'])
        self.assertEqual(
            test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
                [0, 12, 55, 70, 1005, 1200, 1494, 2095, 3890, 4094, 4095], 12, 10, is_signed=True),
            ['3-5, 3', '9-10, 6'])
        self.assertEqual(
            test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
                [0, 44, 100, 150, 511, 600, 750], 10, 15, is_signed=True), ['11-12, 2', '14-15, 2'])
        self.assertEqual(
            test_driven_ranges.get_continuous_ranges_from_a2d_sensor(
                [0, 43, 101, 151, 511, 601, 751, 1500], 12, 10, is_signed=False), ['0-2, 7'])

    def test_get_max_possible_reading(self):
        # Max raw reading for n bits is 2**n - 1.
        self.assertEqual(test_driven_ranges.get_max_possible_reading(10), 1023)
        self.assertEqual(test_driven_ranges.get_max_possible_reading(12), 4095)
unittest.main()
| 56.170455
| 116
| 0.658102
| 658
| 4,943
| 4.574468
| 0.138298
| 0.126246
| 0.201993
| 0.249169
| 0.828571
| 0.788704
| 0.751163
| 0.746512
| 0.734552
| 0.562126
| 0
| 0.121387
| 0.230022
| 4,943
| 87
| 117
| 56.816092
| 0.669469
| 0
| 0
| 0.30137
| 0
| 0
| 0.029334
| 0
| 0
| 0
| 0
| 0
| 0.438356
| 1
| 0.123288
| false
| 0
| 0.027397
| 0
| 0.164384
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5b5fe97517109a39eae916478ec731bef364b218
| 99
|
py
|
Python
|
assets/tuned/daemon/tuned/plugins/exceptions.py
|
sjug/cluster-node-tuning-operator
|
8654d1c9558d0d5ef03d14373c877ebc737f9736
|
[
"Apache-2.0"
] | 53
|
2018-11-13T07:02:03.000Z
|
2022-03-25T00:00:04.000Z
|
assets/tuned/daemon/tuned/plugins/exceptions.py
|
sjug/cluster-node-tuning-operator
|
8654d1c9558d0d5ef03d14373c877ebc737f9736
|
[
"Apache-2.0"
] | 324
|
2018-10-02T14:18:54.000Z
|
2022-03-31T23:47:33.000Z
|
assets/tuned/daemon/tuned/plugins/exceptions.py
|
sjug/cluster-node-tuning-operator
|
8654d1c9558d0d5ef03d14373c877ebc737f9736
|
[
"Apache-2.0"
] | 54
|
2018-10-01T16:55:09.000Z
|
2022-03-28T13:56:53.000Z
|
import tuned.exceptions
class NotSupportedPluginException(tuned.exceptions.TunedException):
    """Raised for a tuned plugin that is not supported.

    Carries no extra state; presumably raised by plugin loading when a
    requested plugin cannot be used — confirm at the raise sites.
    """
    pass
| 19.8
| 67
| 0.868687
| 9
| 99
| 9.555556
| 0.777778
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070707
| 99
| 4
| 68
| 24.75
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
5b6c7b169bbff704603a33c87c8e8e0c2012a913
| 105
|
py
|
Python
|
cognite/client/__init__.py
|
sakshi87/cognite-sdk-python
|
eb3d569fd058dfd8e3c0c29dee2a635deabad1ac
|
[
"Apache-2.0"
] | null | null | null |
cognite/client/__init__.py
|
sakshi87/cognite-sdk-python
|
eb3d569fd058dfd8e3c0c29dee2a635deabad1ac
|
[
"Apache-2.0"
] | null | null | null |
cognite/client/__init__.py
|
sakshi87/cognite-sdk-python
|
eb3d569fd058dfd8e3c0c29dee2a635deabad1ac
|
[
"Apache-2.0"
] | null | null | null |
from cognite.client._cognite_client import CogniteClient
from cognite.client._version import __version__
| 35
| 56
| 0.885714
| 13
| 105
| 6.615385
| 0.461538
| 0.453488
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 105
| 2
| 57
| 52.5
| 0.886598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5bff93c9c7f411cad28d013b77c9ed04f20a4b69
| 244
|
py
|
Python
|
theseus/segmentation/datasets/__init__.py
|
kaylode/shrec22-pothole
|
700d1632de686214e42a2f56aeaceab30c8b9a3f
|
[
"MIT"
] | 1
|
2022-03-19T11:52:53.000Z
|
2022-03-19T11:52:53.000Z
|
theseus/segmentation/datasets/__init__.py
|
kaylode/shrec22-pothole
|
700d1632de686214e42a2f56aeaceab30c8b9a3f
|
[
"MIT"
] | null | null | null |
theseus/segmentation/datasets/__init__.py
|
kaylode/shrec22-pothole
|
700d1632de686214e42a2f56aeaceab30c8b9a3f
|
[
"MIT"
] | 1
|
2022-03-19T11:53:10.000Z
|
2022-03-19T11:53:10.000Z
|
from theseus.base.datasets import DATASET_REGISTRY, DATALOADER_REGISTRY
from .csv_dataset import CSVDataset
from .mosaic_dataset import CSVDatasetWithMosaic

# Register the segmentation datasets so they can be constructed by name
# through the shared registry. DATALOADER_REGISTRY is imported but not used
# here — presumably re-exported for package consumers; TODO confirm.
DATASET_REGISTRY.register(CSVDataset)
DATASET_REGISTRY.register(CSVDatasetWithMosaic)
| 34.857143
| 71
| 0.889344
| 27
| 244
| 7.814815
| 0.481481
| 0.21327
| 0.218009
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 244
| 7
| 72
| 34.857143
| 0.925439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
753ef0b6bc909f3c77fd22607ba40531e9b7383c
| 55,355
|
py
|
Python
|
tests/test_drs.py
|
rpatil524/fence
|
c5233752f34eaa4ff1046ee930ab7f4218d4ac3b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_drs.py
|
rpatil524/fence
|
c5233752f34eaa4ff1046ee930ab7f4218d4ac3b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_drs.py
|
rpatil524/fence
|
c5233752f34eaa4ff1046ee930ab7f4218d4ac3b
|
[
"Apache-2.0"
] | null | null | null |
import flask
import httpx
import hashlib
import json
import jwt
import pytest
import requests
import responses
from tests import utils
import time
from unittest.mock import MagicMock, patch
from gen3authz.client.arborist.client import ArboristClient
from fence.config import config
from fence.models import GA4GHPassportCache
from tests.utils import add_test_ras_user, TEST_RAS_USERNAME, TEST_RAS_SUB
def get_doc(has_version=True, urls=None, drs_list=0):
    """Build a minimal indexd-record fixture.

    Fix: the default was the mutable `urls=list()`, evaluated once at
    definition time — replaced with the `None` sentinel (identical behavior:
    any falsy value keeps the default URL list).

    Args:
        has_version: include a "version": "1" field when True.
        urls: optional replacement for the default storage-URL list; falsy
            values (None, []) keep the default.
        drs_list: unused here; kept for call-site compatibility.

    Returns:
        dict shaped like an indexd document.
    """
    doc = {
        "form": "object",
        "size": 123,
        "urls": ["s3://endpointurl/bucket/key"],
        "hashes": {"md5": "1234"},
    }
    if has_version:
        doc["version"] = "1"
    if urls:
        doc["urls"] = urls
    return doc
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_unauthorized(
    client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """An invalid bearer token gets 401 from the DRS access endpoint."""
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    headers = {"Authorization": "Bearer INVALID"}
    response = client.get(
        f"/ga4gh/drs/v1/objects/{test_guid}/access/{access_id}",
        headers=headers,
    )
    assert response.status_code == 401
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_with_access_id(
    client,
    user_client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """A validly signed token with download authorization gets a 200 from
    /ga4gh/drs/v1/objects/<guid>/access/<access_id> for both s3 and gs."""
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    user = {
        "Authorization": "Bearer "
        + jwt.encode(
            utils.authorized_download_context_claims(
                user_client.username, user_client.user_id
            ),
            key=rsa_private_key,
            headers={"kid": kid},
            algorithm="RS256",
        ).decode("utf-8")  # NOTE(review): `.decode` implies PyJWT 1.x (bytes); 2.x returns str — confirm pinned version
    }
    res = client.get(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers=user,
    )
    assert res.status_code == 200
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_no_access_id(
    client,
    user_client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """Omitting the access id ("/access/" with nothing after it) is a client
    error: expect 400 even with a valid, authorized token.

    Fix: dropped the unused `access_id` local — this test intentionally
    leaves the access id off the URL, so looking it up was dead code.
    """
    test_guid = "1"
    user = {
        "Authorization": "Bearer "
        + jwt.encode(
            utils.authorized_download_context_claims(
                user_client.username, user_client.user_id
            ),
            key=rsa_private_key,
            headers={"kid": kid},
            algorithm="RS256",
        ).decode("utf-8")
    }
    res = client.get(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/",
        headers=user,
    )
    assert res.status_code == 400
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_no_bearer_token(
    client,
    indexd_client,
):
    """Omitting the Authorization header entirely yields a 401."""
    location = indexd_client["indexed_file_location"]
    guid = "1"
    response = client.get(f"/ga4gh/drs/v1/objects/{guid}/access/{location}")
    assert response.status_code == 401
@responses.activate
def test_get_presigned_url_wrong_access_id(
    client,
    user_client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """An access id matching no indexed file location returns 404."""
    guid = "1"
    claims = utils.authorized_download_context_claims(
        user_client.username, user_client.user_id
    )
    token = jwt.encode(
        claims,
        key=rsa_private_key,
        headers={"kid": kid},
        algorithm="RS256",
    ).decode("utf-8")
    # "s2" is not a valid indexed file location for the record
    response = client.get(
        f"/ga4gh/drs/v1/objects/{guid}/access/s2",
        headers={"Authorization": "Bearer " + token},
    )
    assert response.status_code == 404
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_with_encoded_slash(
    client,
    user_client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    A GUID containing a URL-encoded slash (prefixed DRS id) is accepted and
    resolves to a 200.

    Previously this test built an indexd doc via ``get_doc()`` and set its
    ``did``, but the doc was never registered or passed anywhere, so those
    dead locals have been removed.
    """
    access_id = indexd_client["indexed_file_location"]
    user = {
        "Authorization": "Bearer "
        + jwt.encode(
            utils.authorized_download_context_claims(
                user_client.username, user_client.user_id
            ),
            key=rsa_private_key,
            headers={"kid": kid},
            algorithm="RS256",
        ).decode("utf-8")
    }
    # "dg.TEST/..." with the slash percent-encoded; the endpoint must decode it
    did = "dg.TEST%2Fed8f4658-6acd-4f96-9dd8-3709890c959e"
    res = client.get(
        "/ga4gh/drs/v1/objects/" + did + "/access/" + access_id,
        headers=user,
    )
    assert res.status_code == 200
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
def test_get_presigned_url_with_query_params(
    client,
    user_client,
    indexd_client,
    kid,
    rsa_private_key,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    Extra query parameters (e.g. ``userProject``) on the DRS access endpoint
    are tolerated and the request still succeeds with a 200.

    Previously this test built an indexd doc via ``get_doc()`` and set its
    ``did``, but the doc was never registered or passed anywhere, so those
    dead locals have been removed.
    """
    access_id = indexd_client["indexed_file_location"]
    user = {
        "Authorization": "Bearer "
        + jwt.encode(
            utils.authorized_download_context_claims(
                user_client.username, user_client.user_id
            ),
            key=rsa_private_key,
            headers={"kid": kid},
            algorithm="RS256",
        ).decode("utf-8")
    }
    # prefixed DRS id with a percent-encoded slash
    did = "dg.TEST%2Fed8f4658-6acd-4f96-9dd8-3709890c959e"
    res = client.get(
        "/ga4gh/drs/v1/objects/"
        + did
        + "/access/"
        + access_id
        + "?userProject=someproject&arbitrary_parameter=val",
        headers=user,
    )
    assert res.status_code == 200
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_passport_use_disabled(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    When GA4GH_PASSPORTS_TO_DRS_ENABLED is off, POSTing a well-formed passport
    to the DRS access endpoint is rejected with a 400 -- even though the
    mocked arborist would authorize the request.
    """
    # NOTE(review): config is mutated without restoring the previous value;
    # this assumes test isolation resets it elsewhere -- confirm.
    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = False
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    # arborist says "yes" -- the 400 below must come from the disabled flag,
    # not from an authorization failure
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "exp": int(time.time()) + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": int(time.time()),
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        # the first permission (phs000991.c1) matches the record's authz above
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": int(time.time()) + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    # serve this app's own JWKS so the passport/visa signatures verify
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code == 400
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_get_presigned_url_for_non_public_data_with_passport(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    With GA4GH_PASSPORTS_TO_DRS_ENABLED on, POSTing a valid passport whose
    visa covers the record's authz (arborist mocked to authorize) returns a
    200 from the DRS access endpoint.
    """
    # NOTE(review): config is mutated without restoring the previous value;
    # this assumes test isolation resets it elsewhere -- confirm.
    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = True
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "exp": int(time.time()) + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": int(time.time()),
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        # the first permission (phs000991.c1) matches the record's authz above
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": int(time.time()) + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    # serve this app's own JWKS so the passport/visa signatures verify
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code == 200
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_get_presigned_url_with_passport_with_incorrect_authz(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    A structurally valid passport still yields a 401 when arborist denies
    authorization (mocked to return ``{"auth": False}``) for the record's
    non-public authz.
    """
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    # arborist denies -- this is what should drive the 401 below
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": False}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "exp": int(time.time()) + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": int(time.time()),
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": "abcde12345aspdij",
        "iat": int(time.time()),
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": int(time.time()) + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    # serve this app's own JWKS so the passport/visa signatures verify
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code == 401
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_get_presigned_url_for_public_data_with_no_passport(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    POSTing an empty passport list for a record with open authz ("/open",
    acl "*") succeeds with a 200 when arborist authorizes the request.
    """
    public_record = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/open"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(public_record)
    mock_arborist_requests(
        {"arborist/auth/request": {"POST": ({"auth": True}, 200)}}
    )
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    access_id = indexd_client["indexed_file_location"]
    guid = "1"
    # no passports at all -- public data should not require any
    response = client.post(
        f"/ga4gh/drs/v1/objects/{guid}/access/{access_id}",
        headers={"Content-Type": "application/json"},
        data=json.dumps({"passports": []}),
    )
    assert response.status_code == 200
@responses.activate
@pytest.mark.parametrize("indexd_client", ["s3", "gs"], indirect=True)
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_get_presigned_url_for_non_public_data_with_no_passport(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
):
    """
    POSTing an empty passport list for a record with non-public authz is
    rejected with a 401 when arborist denies the request.
    """
    protected_record = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(protected_record)
    mock_arborist_requests(
        {"arborist/auth/request": {"POST": ({"auth": False}, 200)}}
    )
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    access_id = indexd_client["indexed_file_location"]
    guid = "1"
    # no passports supplied, so there is nothing to authorize against
    response = client.post(
        f"/ga4gh/drs/v1/objects/{guid}/access/{access_id}",
        headers={"Content-Type": "application/json"},
        data=json.dumps({"passports": []}),
    )
    assert response.status_code == 401
@responses.activate
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_passport_cache_valid_passport(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
    db_session,
    monkeypatch,
):
    """
    Test that when a passport is provided a second time, the in-memory cache gets used
    and the database cache is populated.
    NOTE: This is very similar to the test_get_presigned_url_for_non_public_data_with_passport
    test with added stuff to check cache functionality
    """
    # reset caches
    # swap a fresh dict in for the module-level in-memory cache so this test
    # observes only its own entries
    PASSPORT_CACHE = {}
    from fence.resources.ga4gh import passports as passports_module
    monkeypatch.setattr(passports_module, "PASSPORT_CACHE", PASSPORT_CACHE)
    db_session.query(GA4GHPassportCache).delete()
    db_session.commit()
    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = True
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": ["*"],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    current_time = int(time.time())
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "exp": current_time + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": current_time,
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": current_time + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    # caches are keyed by the sha256 of the raw encoded passport
    passport_hash = hashlib.sha256(encoded_passport.encode("utf-8")).hexdigest()
    # check database cache
    cached_passports = [
        item.passport_hash for item in db_session.query(GA4GHPassportCache).all()
    ]
    assert passport_hash not in cached_passports
    # check in-memory cache
    assert not PASSPORT_CACHE.get(passport_hash)
    before_cache_start = time.time()
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    before_cache_end = time.time()
    before_cache_time = before_cache_end - before_cache_start
    assert res.status_code == 200
    # check that database cache populated
    cached_passports = [
        item.passport_hash for item in db_session.query(GA4GHPassportCache).all()
    ]
    assert passport_hash in cached_passports
    # check that in-memory cache populated
    assert PASSPORT_CACHE.get(passport_hash)
    after_cache_start = time.time()
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    after_cache_end = time.time()
    after_cache_time = after_cache_end - after_cache_start
    assert res.status_code == 200
    # make sure using the cache is faster
    # NOTE(review): wall-clock comparison -- potentially flaky on a loaded CI
    # machine; confirm this assertion is acceptable or widen the margin.
    assert after_cache_time < before_cache_time
@responses.activate
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_passport_cache_invalid_passport(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
    db_session,
    monkeypatch,
):
    """
    Test that when an invalid passport is provided a second time, the in-memory cache
    does NOT get used and the database cache is NOT populated.
    NOTE: This is very similar to the test_get_presigned_url_for_non_public_data_with_passport
    test with added stuff to check cache functionality
    """
    # reset caches
    # swap a fresh dict in for the module-level in-memory cache so this test
    # observes only its own entries
    PASSPORT_CACHE = {}
    from fence.resources.ga4gh import passports as passports_module
    monkeypatch.setattr(passports_module, "PASSPORT_CACHE", PASSPORT_CACHE)
    db_session.query(GA4GHPassportCache).delete()
    db_session.commit()
    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = True
    # NOTE(review): variable name says "public_acl" but acl is [""] here
    # (unlike the sibling tests that use ["*"]) -- confirm intentional.
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": [""],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": False}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    current_time = int(time.time())
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "exp": current_time + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": current_time,
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": current_time + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    # corrupt the JWT by prefixing junk so signature/format validation fails
    invalid_encoded_passport = "invalid" + jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [invalid_encoded_passport]
    data = {"passports": passports}
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    # caches are keyed by the sha256 of the raw encoded passport
    passport_hash = hashlib.sha256(invalid_encoded_passport.encode("utf-8")).hexdigest()
    # check database cache
    cached_passports = [
        item.passport_hash for item in db_session.query(GA4GHPassportCache).all()
    ]
    assert passport_hash not in cached_passports
    # check in-memory cache
    assert not PASSPORT_CACHE.get(passport_hash)
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code != 200
    # check that database cache NOT populated
    cached_passports = [
        item.passport_hash for item in db_session.query(GA4GHPassportCache).all()
    ]
    assert passport_hash not in cached_passports
    # check that in-memory cache NOT populated
    assert not PASSPORT_CACHE.get(passport_hash)
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code != 200
@responses.activate
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_passport_cache_expired_in_memory_valid_in_db(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
    db_session,
    monkeypatch,
):
    """
    Test that when a passport is provided a second time when the the in-memory cache
    is expired but the database cache is valid, we still get a successful response.
    Check that cached database is updated and placed in in-memory cache.
    NOTE: This is very similar to the test_get_presigned_url_for_non_public_data_with_passport
    test with added stuff to check cache functionality
    """
    from fence.resources.ga4gh import passports as passports_module
    # reset the database cache; the in-memory cache is replaced wholesale below
    db_session.query(GA4GHPassportCache).delete()
    db_session.commit()
    # username the db cache entry will map the passport to
    test_username = "abcd-asdj-sajpiasj12iojd-asnoinstsstg.nih.gov"
    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = True
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": [""],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group
    # Prepare Passport/Visa
    current_time = int(time.time())
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "exp": current_time + 1000,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": current_time,
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")
    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": current_time + 1000,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")
    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})
    # caches are keyed by the sha256 of the raw encoded passport
    passport_hash = hashlib.sha256(encoded_passport.encode("utf-8")).hexdigest()
    # simulate a VALID db cache entry for this passport
    passports_module.put_gen3_usernames_for_passport_into_cache(
        encoded_passport, [test_username], current_time + 1000, db_session=db_session
    )
    # double-check database cache
    cached_passport = (
        db_session.query(GA4GHPassportCache)
        .filter(GA4GHPassportCache.passport_hash == passport_hash)
        .first()
    )
    # greater and NOT == b/c of logic to set internal expiration less than real to allow
    # time for expiration job to run
    assert cached_passport and cached_passport.expires_at > current_time
    # simulate in-memory cache with an EXPIRED passport by overriding the in-memory cache
    PASSPORT_CACHE = {passport_hash: ([test_username], current_time - 1)}
    assert PASSPORT_CACHE.get(passport_hash, ("", 0))[1] == current_time - 1
    monkeypatch.setattr(passports_module, "PASSPORT_CACHE", PASSPORT_CACHE)
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code == 200
    # check that database cache still populated with exactly one entry
    assert (
        len([item.passport_hash for item in db_session.query(GA4GHPassportCache).all()])
        == 1
    )
    cached_passport = (
        db_session.query(GA4GHPassportCache)
        .filter(GA4GHPassportCache.passport_hash == passport_hash)
        .first()
    )
    # greater and NOT == b/c of logic to set internal expiration less than real to allow
    # time for expiration job to run
    assert cached_passport and cached_passport.expires_at > current_time
    # check that in-memory cache got refreshed from the db entry's expiration
    # greater and NOT == b/c of logic to set internal expiration less than real to allow
    # time for expiration job to run
    assert PASSPORT_CACHE.get(passport_hash, ("", 0))[1] > current_time
@responses.activate
@patch("httpx.get")
@patch("fence.resources.google.utils._create_proxy_group")
@patch("fence.scripting.fence_create.ArboristClient")
def test_passport_cache_expired(
    mock_arborist,
    mock_google_proxy_group,
    mock_httpx_get,
    client,
    indexd_client,
    kid,
    rsa_private_key,
    rsa_public_key,
    indexd_client_accepting_record,
    mock_arborist_requests,
    google_proxy_group,
    primary_google_service_account,
    cloud_manager,
    google_signed_url,
    db_session,
    monkeypatch,
):
    """
    Test that when a passport is expired, we don't get a successful response, even
    if the passport was previously cached.

    NOTE: This is very similar to the test_get_presigned_url_for_non_public_data_with_passport
    test with added stuff to check cache functionality
    """
    # reset cache
    # Replace the module-level in-memory cache with a fresh dict so previous
    # tests cannot leak cached passports into this one.
    PASSPORT_CACHE = {}
    from fence.resources.ga4gh import passports as passports_module

    monkeypatch.setattr(passports_module, "PASSPORT_CACHE", PASSPORT_CACHE)

    # Also clear the database-backed passport cache.
    db_session.query(GA4GHPassportCache).delete()
    db_session.commit()

    config["GA4GH_PASSPORTS_TO_DRS_ENABLED"] = True

    # Record is NOT public: access is granted only via the authz resource,
    # which the visa's dbGaP permission (phs000991.c1) maps onto.
    indexd_record_with_non_public_authz_and_public_acl_populated = {
        "did": "1",
        "baseid": "",
        "rev": "",
        "size": 10,
        "file_name": "file1",
        "urls": ["s3://bucket1/key", "gs://bucket1/key"],
        "hashes": {},
        "metadata": {},
        "authz": ["/orgA/programs/phs000991.c1"],
        "acl": [""],
        "form": "",
        "created_date": "",
        "updated_date": "",
    }
    indexd_client_accepting_record(
        indexd_record_with_non_public_authz_and_public_acl_populated
    )
    # First request: arborist authorizes the user.
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})
    mock_arborist.return_value = MagicMock(ArboristClient)
    mock_google_proxy_group.return_value = google_proxy_group

    # Prepare Passport/Visa
    # Everything expires in 2 seconds so the test can wait out the expiry
    # below with a short sleep.
    current_time = int(time.time())
    headers = {"kid": kid}
    decoded_visa = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "exp": current_time + 2,
        "scope": "openid ga4gh_passport_v1 email profile",
        "jti": "jtiajoidasndokmasdl",
        "txn": "sapidjspa.asipidja",
        "name": "",
        "ga4gh_visa_v1": {
            "type": "https://ras.nih.gov/visas/v1.1",
            "asserted": current_time,
            "value": "https://stsstg.nih.gov/passport/dbgap/v1.1",
            "source": "https://ncbi.nlm.nih.gov/gap",
        },
        "ras_dbgap_permissions": [
            {
                "consent_name": "Health/Medical/Biomedical",
                "phs_id": "phs000991",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 2,
            },
            {
                "consent_name": "General Research Use (IRB, PUB)",
                "phs_id": "phs000961",
                "version": "v1",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 2,
            },
            {
                "consent_name": "Disease-Specific (Cardiovascular Disease)",
                "phs_id": "phs000279",
                "version": "v2",
                "participant_set": "p1",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 2,
            },
            {
                "consent_name": "Health/Medical/Biomedical (IRB)",
                "phs_id": "phs000286",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c3",
                "role": "designated user",
                "expiration": current_time + 2,
            },
            {
                "consent_name": "Disease-Specific (Focused Disease Only, IRB, NPU)",
                "phs_id": "phs000289",
                "version": "v6",
                "participant_set": "p2",
                "consent_group": "c2",
                "role": "designated user",
                "expiration": current_time + 2,
            },
            {
                "consent_name": "Disease-Specific (Autism Spectrum Disorder)",
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
                "consent_group": "c1",
                "role": "designated user",
                "expiration": current_time + 2,
            },
        ],
    }
    encoded_visa = jwt.encode(
        decoded_visa, key=rsa_private_key, headers=headers, algorithm="RS256"
    ).decode("utf-8")

    passport_header = {
        "type": "JWT",
        "alg": "RS256",
        "kid": kid,
    }
    passport = {
        "iss": "https://stsstg.nih.gov",
        "sub": TEST_RAS_SUB,
        "iat": current_time,
        "scope": "openid ga4gh_passport_v1 email profile",
        "exp": current_time + 2,
        "ga4gh_passport_v1": [encoded_visa],
    }
    encoded_passport = jwt.encode(
        passport, key=rsa_private_key, headers=passport_header, algorithm="RS256"
    ).decode("utf-8")

    access_id = indexd_client["indexed_file_location"]
    test_guid = "1"
    passports = [encoded_passport]
    data = {"passports": passports}
    # Serve this app's own JWKs when fence fetches the issuer's keys, so the
    # passport/visa signatures validate against rsa_private_key.
    keys = [keypair.public_key_to_jwk() for keypair in flask.current_app.keypairs]
    mock_httpx_get.return_value = httpx.Response(200, json={"keys": keys})

    # Passports are cached keyed by the SHA-256 of the encoded passport.
    passport_hash = hashlib.sha256(encoded_passport.encode("utf-8")).hexdigest()

    # check database cache
    cached_passports = [
        item.passport_hash for item in db_session.query(GA4GHPassportCache).all()
    ]
    assert passport_hash not in cached_passports

    # check in-memory cache
    assert not PASSPORT_CACHE.get(passport_hash)

    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code == 200

    # ensure passport is expired by sleeping
    expire_time = current_time + 2
    current_time = int(time.time())
    if current_time < expire_time:
        sleep_time = expire_time - current_time
        time.sleep(sleep_time)

    # try again
    # Re-mock arborist to deny authorization for the second request; with the
    # passport now expired the endpoint must NOT succeed even though the
    # passport was previously cached.
    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": False}, 200)}})
    res = client.post(
        "/ga4gh/drs/v1/objects/" + test_guid + "/access/" + access_id,
        headers={
            "Content-Type": "application/json",
        },
        data=json.dumps(data),
    )
    assert res.status_code != 200
| 32.447245
| 94
| 0.573245
| 5,782
| 55,355
| 5.212037
| 0.060706
| 0.023892
| 0.022299
| 0.039023
| 0.936023
| 0.934364
| 0.928126
| 0.921257
| 0.917607
| 0.915782
| 0
| 0.031908
| 0.292855
| 55,355
| 1,705
| 95
| 32.466276
| 0.737968
| 0.054394
| 0
| 0.823253
| 0
| 0
| 0.258116
| 0.056633
| 0
| 0
| 0
| 0
| 0.028898
| 1
| 0.011425
| false
| 0.09879
| 0.014113
| 0
| 0.02621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7559abc11f30f72979aa3bfbc3adc76e19d2eabb
| 6,315
|
py
|
Python
|
cbe/cbe/supplier_partner/migrations/0001_initial.py
|
cdaf/cbe
|
7945a3fad11ae4612e22163094571ac9157dca7f
|
[
"Apache-2.0"
] | 3
|
2019-02-26T19:54:51.000Z
|
2021-03-23T02:57:02.000Z
|
cbe/cbe/supplier_partner/migrations/0001_initial.py
|
cdaf/cbe
|
7945a3fad11ae4612e22163094571ac9157dca7f
|
[
"Apache-2.0"
] | 6
|
2016-12-23T02:11:21.000Z
|
2018-09-30T18:50:59.000Z
|
cbe/cbe/supplier_partner/migrations/0001_initial.py
|
cdaf/cbe
|
7945a3fad11ae4612e22163094571ac9157dca7f
|
[
"Apache-2.0"
] | 4
|
2017-02-11T04:40:52.000Z
|
2020-10-12T22:22:54.000Z
|
# Generated by Django 2.0.1 on 2018-01-31 15:25
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the supplier_partner app.
    # Buyer, Partner and Supplier share an identical field layout (the only
    # difference is the related_name prefix); SupplierAccount links back to
    # Supplier via a ForeignKey keyed on account_number.

    initial = True

    # Requires the party and resource apps' initial migrations, since the
    # M2M/FK targets (EmailContact, Individual, Organisation, PhysicalContact,
    # TelephoneNumber, LogicalResource, PhysicalResource) live there.
    dependencies = [
        ('party', '0001_initial'),
        ('resource', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Buyer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateTimeField(auto_now_add=True)),
                ('valid_to', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=200)),
                ('code', models.CharField(blank=True, max_length=50, null=True)),
                ('email_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_buyer_email_contacts', to='party.EmailContact')),
                ('individual', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Individual')),
                ('logical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_buyer_logical_resources', to='resource.LogicalResource')),
                ('organisation', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Organisation')),
                ('physical_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_buyer_physical_contacts', to='party.PhysicalContact')),
                ('physical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_buyer_physical_resources', to='resource.PhysicalResource')),
                ('telephone_numbers', models.ManyToManyField(blank=True, related_name='supplier_partner_buyer_telephone_numbers', to='party.TelephoneNumber')),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='Partner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateTimeField(auto_now_add=True)),
                ('valid_to', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=200)),
                ('code', models.CharField(blank=True, max_length=50, null=True)),
                ('email_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_partner_email_contacts', to='party.EmailContact')),
                ('individual', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Individual')),
                ('logical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_partner_logical_resources', to='resource.LogicalResource')),
                ('organisation', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Organisation')),
                ('physical_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_partner_physical_contacts', to='party.PhysicalContact')),
                ('physical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_partner_physical_resources', to='resource.PhysicalResource')),
                ('telephone_numbers', models.ManyToManyField(blank=True, related_name='supplier_partner_partner_telephone_numbers', to='party.TelephoneNumber')),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='Supplier',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateTimeField(auto_now_add=True)),
                ('valid_to', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=200)),
                ('code', models.CharField(blank=True, max_length=50, null=True)),
                ('email_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_supplier_email_contacts', to='party.EmailContact')),
                ('individual', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Individual')),
                ('logical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_supplier_logical_resources', to='resource.LogicalResource')),
                ('organisation', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='party.Organisation')),
                ('physical_contacts', models.ManyToManyField(blank=True, related_name='supplier_partner_supplier_physical_contacts', to='party.PhysicalContact')),
                ('physical_resources', models.ManyToManyField(blank=True, related_name='supplier_partner_supplier_physical_resources', to='resource.PhysicalResource')),
                ('telephone_numbers', models.ManyToManyField(blank=True, related_name='supplier_partner_supplier_telephone_numbers', to='party.TelephoneNumber')),
            ],
            options={
                'ordering': ['id'],
            },
        ),
        migrations.CreateModel(
            name='SupplierAccount',
            # Natural primary key: account_number (no auto 'id' column).
            fields=[
                ('created', models.DateField(auto_now_add=True)),
                ('valid_from', models.DateField(blank=True, null=True)),
                ('valid_to', models.DateField(blank=True, null=True)),
                ('account_number', models.CharField(max_length=200, primary_key=True, serialize=False)),
                ('account_status', models.CharField(max_length=100)),
                ('account_type', models.CharField(max_length=200)),
                ('name', models.CharField(max_length=300)),
                ('pin', models.CharField(blank=True, max_length=100, null=True)),
                ('supplier', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customer_accounts', to='supplier_partner.Supplier')),
            ],
        ),
    ]
| 68.641304
| 169
| 0.641964
| 629
| 6,315
| 6.219396
| 0.138315
| 0.069018
| 0.099693
| 0.115031
| 0.884202
| 0.851227
| 0.826431
| 0.816973
| 0.816973
| 0.811861
| 0
| 0.010803
| 0.22312
| 6,315
| 91
| 170
| 69.395604
| 0.786588
| 0.007126
| 0
| 0.511905
| 1
| 0
| 0.271491
| 0.148454
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f37464f83f643cb4f590c4fddb03b1710334e045
| 1,724
|
py
|
Python
|
test/unit/core/git/test_ignore.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | 6
|
2015-09-10T13:20:34.000Z
|
2021-02-15T08:10:27.000Z
|
test/unit/core/git/test_ignore.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | 41
|
2015-09-09T12:44:55.000Z
|
2021-06-01T23:25:56.000Z
|
test/unit/core/git/test_ignore.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=missing-docstring
from unittest.mock import patch, mock_open
from peltak.core import git
from peltak.core import util
# Sample .gitignore content: two plain patterns and one rooted ('/'-prefixed)
# pattern, one per line.
FAKE_GIT_IGNORE = 'pattern1\npattern2\n/pattern3'
@patch('peltak.core.git.open', mock_open(read_data='pattern1\npattern2\npattern3'))
def test_returns_all_patterns(app_conf):
    """git.ignore() returns every pattern listed in the ignore file."""
    # Drop any memoized result so the mocked file is actually read.
    util.cached_result.clear(git.ignore)

    expected = frozenset({'pattern1', 'pattern2', 'pattern3'})
    assert frozenset(git.ignore()) == expected

    # Clear the memoized result again so later tests start from scratch.
    util.cached_result.clear(git.ignore)
@patch('peltak.core.git.open', mock_open(read_data='\n'.join([
    'pattern1',
    ' pattern2 \t',
    '\tpattern3',
])))
def test_strips_whitespace(app_conf):
    """Whitespace around each pattern is stripped by git.ignore()."""
    # Drop any memoized result so the mocked file is actually read.
    util.cached_result.clear(git.ignore)

    patterns = frozenset(git.ignore())
    assert patterns == frozenset({'pattern1', 'pattern2', 'pattern3'})

    # Clear the memoized result again so later tests start from scratch.
    util.cached_result.clear(git.ignore)
@patch('peltak.core.git.open', mock_open(read_data='\n'.join([
    'pattern1',
    '',
    ' pattern2 \t',
    '',
    '\tpattern3',
    '',
])))
def test_skips_empty_lines(app_conf):
    """Blank lines in the ignore file are not returned as patterns.

    BUGFIX: the fixture was missing commas after each '' entry, so Python's
    implicit string-literal concatenation merged '' into the following
    pattern and the read_data contained no empty lines at all — the test
    never exercised blank-line handling. The commas restore the intended
    fixture (an empty line between/after each pattern).
    """
    # Drop any memoized result so the mocked file is actually read.
    util.cached_result.clear(git.ignore)

    assert frozenset(git.ignore()) == frozenset((
        'pattern1',
        'pattern2',
        'pattern3'
    ))

    util.cached_result.clear(git.ignore)
@patch('peltak.core.git.open', mock_open(read_data=b'\n'.join([
    b'pattern1',
    b'',
    b' pattern2 \t',
    b'',
    b'\tpattern3',
    b'',
])))
def test_works_if_parse_data_is_bytes(app_conf):
    """git.ignore() decodes and parses bytes content, skipping blank lines.

    BUGFIX: as in test_skips_empty_lines, missing commas after each b''
    entry made adjacent bytes literals concatenate, so the fixture never
    actually contained empty lines. The commas restore the intended data.
    """
    # Drop any memoized result so the mocked file is actually read.
    util.cached_result.clear(git.ignore)

    assert frozenset(git.ignore()) == frozenset((
        'pattern1',
        'pattern2',
        'pattern3'
    ))

    util.cached_result.clear(git.ignore)
| 21.822785
| 63
| 0.61891
| 205
| 1,724
| 5.02439
| 0.234146
| 0.113592
| 0.124272
| 0.163107
| 0.759223
| 0.723301
| 0.723301
| 0.723301
| 0.723301
| 0.723301
| 0
| 0.019912
| 0.213457
| 1,724
| 78
| 64
| 22.102564
| 0.739676
| 0.019142
| 0
| 0.772727
| 0
| 0
| 0.195974
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.060606
| false
| 0
| 0.045455
| 0
| 0.106061
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f3a5bb8d7982c7d85f7d3e584dc3d1925927e78e
| 229
|
py
|
Python
|
serpcord/exceptions/__init__.py
|
PgBiel/serpcord
|
482736dc691027417edcd6500cdfbf9053f92b63
|
[
"MIT"
] | null | null | null |
serpcord/exceptions/__init__.py
|
PgBiel/serpcord
|
482736dc691027417edcd6500cdfbf9053f92b63
|
[
"MIT"
] | null | null | null |
serpcord/exceptions/__init__.py
|
PgBiel/serpcord
|
482736dc691027417edcd6500cdfbf9053f92b63
|
[
"MIT"
] | null | null | null |
from .serpcordexc import SerpcordException, APIRequestException # TODO: Exception Hierarchy graph
from .dataparseexc import APIDataParseException, APIJsonParseException, APIJsonParsedTypeMismatchException
from .httpexc import *
| 57.25
| 106
| 0.868996
| 18
| 229
| 11.055556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091703
| 229
| 3
| 107
| 76.333333
| 0.956731
| 0.135371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
340e5bbb372686d75ed04441a12868c0fd2d8f56
| 260
|
py
|
Python
|
fluxio_parser/transformers/__init__.py
|
NarrativeScience/fluxio-parser
|
bddd6b86a550ec87a58a2d854978d559e29cf3f4
|
[
"BSD-3-Clause"
] | 1
|
2021-06-09T20:22:38.000Z
|
2021-06-09T20:22:38.000Z
|
fluxio_parser/transformers/__init__.py
|
NarrativeScience/fluxio-parser
|
bddd6b86a550ec87a58a2d854978d559e29cf3f4
|
[
"BSD-3-Clause"
] | null | null | null |
fluxio_parser/transformers/__init__.py
|
NarrativeScience/fluxio-parser
|
bddd6b86a550ec87a58a2d854978d559e29cf3f4
|
[
"BSD-3-Clause"
] | 1
|
2021-06-09T20:22:39.000Z
|
2021-06-09T20:22:39.000Z
|
"""Contains exports from the transformers subpackage"""
from fluxio_parser.transformers.data_dict import DataDictTransformer
from fluxio_parser.transformers.run_method import RunMethodTransformer
from fluxio_parser.transformers.script import ScriptTransformer
| 52
| 70
| 0.884615
| 29
| 260
| 7.758621
| 0.586207
| 0.133333
| 0.213333
| 0.373333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069231
| 260
| 4
| 71
| 65
| 0.929752
| 0.188462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
341db8f982686bdc05e7fea5f460507b7076a32a
| 1,276
|
py
|
Python
|
tests/test_658.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_658.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_658.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 658. Find K Closest Elements
"""
@pytest.fixture(scope="session")
def init_variables_658():
    """Yield a factory returning one Solution instance shared for the session."""
    from src.leetcode_658_find_k_closest_elements import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass658:
    """Tests for LeetCode 658 — Find K Closest Elements."""

    def test_solution_0(self, init_variables_658):
        # k=4 closest elements to x=3 in [1..5].
        solution = init_variables_658()
        result = solution.findClosestElements([1, 2, 3, 4, 5], 4, 3)
        assert result == [1, 2, 3, 4]

    def test_solution_1(self, init_variables_658):
        # x=-1 is below the range, so the window clamps to the left edge.
        solution = init_variables_658()
        result = solution.findClosestElements([1, 2, 3, 4, 5], 4, -1)
        assert result == [1, 2, 3, 4]
#!/usr/bin/env python
import pytest
"""
Test 658. Find K Closest Elements
"""
@pytest.fixture(scope="session")
def init_variables_658():
    """Yield a factory returning one Solution instance shared for the session."""
    from src.leetcode_658_find_k_closest_elements import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass658:
    """Tests for LeetCode 658 — Find K Closest Elements."""

    def test_solution_0(self, init_variables_658):
        # k=4 closest elements to x=3 in [1..5].
        solution = init_variables_658()
        result = solution.findClosestElements([1, 2, 3, 4, 5], 4, 3)
        assert result == [1, 2, 3, 4]

    def test_solution_1(self, init_variables_658):
        # x=-1 is below the range, so the window clamps to the left edge.
        solution = init_variables_658()
        result = solution.findClosestElements([1, 2, 3, 4, 5], 4, -1)
        assert result == [1, 2, 3, 4]
| 22.385965
| 95
| 0.68652
| 184
| 1,276
| 4.48913
| 0.190217
| 0.220339
| 0.271186
| 0.038741
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.104449
| 0.189655
| 1,276
| 56
| 96
| 22.785714
| 0.694391
| 0.031348
| 0
| 1
| 0
| 0
| 0.012153
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.307692
| false
| 0
| 0.153846
| 0.076923
| 0.615385
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
34502a9a4579f9434df7232f0efdd8a1859f7ff5
| 48
|
py
|
Python
|
app/util/__init__.py
|
Manuel7AP/dobc_web_app
|
ebb775e18a4f03f70d1bdb14a7ec8142bce9e857
|
[
"Apache-2.0"
] | 11
|
2015-08-28T17:48:20.000Z
|
2021-11-16T12:20:16.000Z
|
app/util/__init__.py
|
Manuel7AP/dobc_web_app
|
ebb775e18a4f03f70d1bdb14a7ec8142bce9e857
|
[
"Apache-2.0"
] | 9
|
2015-02-23T01:48:42.000Z
|
2021-12-07T09:59:57.000Z
|
app/util/__init__.py
|
Manuel7AP/dobc_web_app
|
ebb775e18a4f03f70d1bdb14a7ec8142bce9e857
|
[
"Apache-2.0"
] | 12
|
2015-01-06T17:21:21.000Z
|
2021-08-05T19:15:27.000Z
|
from flash_form_errors import flash_form_errors
| 24
| 47
| 0.916667
| 8
| 48
| 5
| 0.625
| 0.45
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3452ee91f25627b34cb8e218eb232e8a056f34f9
| 33
|
py
|
Python
|
projekt ASD CLICKER/exe-compiled/save.py
|
extara/PY_machine_clicker
|
6a0151132eb44c16e8d534154bfee1c6c95d7734
|
[
"MIT"
] | null | null | null |
projekt ASD CLICKER/exe-compiled/save.py
|
extara/PY_machine_clicker
|
6a0151132eb44c16e8d534154bfee1c6c95d7734
|
[
"MIT"
] | null | null | null |
projekt ASD CLICKER/exe-compiled/save.py
|
extara/PY_machine_clicker
|
6a0151132eb44c16e8d534154bfee1c6c95d7734
|
[
"MIT"
] | null | null | null |
46
1280
7
800
13
3000
5000
50000
| 3.666667
| 5
| 0.757576
| 8
| 33
| 3.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| 33
| 8
| 6
| 4.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
345da0dcb7cb02a63e13850045843ba4ee5a05a4
| 173
|
py
|
Python
|
tests/test_version.py
|
LucaCappelletti94/hpo_downloader
|
14497478626c234539659b9f55b16496e6b6b86b
|
[
"MIT"
] | null | null | null |
tests/test_version.py
|
LucaCappelletti94/hpo_downloader
|
14497478626c234539659b9f55b16496e6b6b86b
|
[
"MIT"
] | null | null | null |
tests/test_version.py
|
LucaCappelletti94/hpo_downloader
|
14497478626c234539659b9f55b16496e6b6b86b
|
[
"MIT"
] | null | null | null |
from validate_version_code import validate_version_code
from hpo_downloader.__version__ import __version__
def test_version():
    """The package's __version__ string is a well-formed version code."""
    version_is_valid = validate_version_code(__version__)
    assert version_is_valid
| 34.6
| 55
| 0.872832
| 22
| 173
| 5.954545
| 0.454545
| 0.343511
| 0.435115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092486
| 173
| 5
| 56
| 34.6
| 0.834395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3481748156a53070d1dacc05ed123a4bc85f9630
| 35,182
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/node_group.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/node_group.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/node_group.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NodeGroupArgs', 'NodeGroup']
@pulumi.input_type
class NodeGroupArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); do not edit by
    # hand — changes will be lost on regeneration.
    def __init__(__self__, *,
                 node_template: pulumi.Input[str],
                 autoscaling_policy: Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 initial_size: Optional[pulumi.Input[int]] = None,
                 maintenance_policy: Optional[pulumi.Input[str]] = None,
                 maintenance_window: Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[int]] = None,
                 zone: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a NodeGroup resource.
        :param pulumi.Input[str] node_template: The URL of the node template to which this node group belongs.
        :param pulumi.Input['NodeGroupAutoscalingPolicyArgs'] autoscaling_policy: If you use sole-tenant nodes for your workloads, you can use the node
               group autoscaler to automatically manage the sizes of your node groups.
               Structure is documented below.
        :param pulumi.Input[str] description: An optional textual description of the resource.
        :param pulumi.Input[int] initial_size: The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] maintenance_policy: Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        :param pulumi.Input['NodeGroupMaintenanceWindowArgs'] maintenance_window: contains properties for the timeframe of maintenance
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[int] size: The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] zone: Zone where this node group is located
        """
        # Only node_template is required; each optional argument is stored
        # only when explicitly supplied.
        pulumi.set(__self__, "node_template", node_template)
        if autoscaling_policy is not None:
            pulumi.set(__self__, "autoscaling_policy", autoscaling_policy)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if initial_size is not None:
            pulumi.set(__self__, "initial_size", initial_size)
        if maintenance_policy is not None:
            pulumi.set(__self__, "maintenance_policy", maintenance_policy)
        if maintenance_window is not None:
            pulumi.set(__self__, "maintenance_window", maintenance_window)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if zone is not None:
            pulumi.set(__self__, "zone", zone)

    # Each snake_case property below maps to the camelCase API field named in
    # its @pulumi.getter decorator; getters/setters delegate storage to
    # pulumi.get/pulumi.set.
    @property
    @pulumi.getter(name="nodeTemplate")
    def node_template(self) -> pulumi.Input[str]:
        """
        The URL of the node template to which this node group belongs.
        """
        return pulumi.get(self, "node_template")

    @node_template.setter
    def node_template(self, value: pulumi.Input[str]):
        pulumi.set(self, "node_template", value)

    @property
    @pulumi.getter(name="autoscalingPolicy")
    def autoscaling_policy(self) -> Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']]:
        """
        If you use sole-tenant nodes for your workloads, you can use the node
        group autoscaler to automatically manage the sizes of your node groups.
        Structure is documented below.
        """
        return pulumi.get(self, "autoscaling_policy")

    @autoscaling_policy.setter
    def autoscaling_policy(self, value: Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']]):
        pulumi.set(self, "autoscaling_policy", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional textual description of the resource.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="initialSize")
    def initial_size(self) -> Optional[pulumi.Input[int]]:
        """
        The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "initial_size")

    @initial_size.setter
    def initial_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "initial_size", value)

    @property
    @pulumi.getter(name="maintenancePolicy")
    def maintenance_policy(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        """
        return pulumi.get(self, "maintenance_policy")

    @maintenance_policy.setter
    def maintenance_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "maintenance_policy", value)

    @property
    @pulumi.getter(name="maintenanceWindow")
    def maintenance_window(self) -> Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']]:
        """
        contains properties for the timeframe of maintenance
        Structure is documented below.
        """
        return pulumi.get(self, "maintenance_window")

    @maintenance_window.setter
    def maintenance_window(self, value: Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']]):
        pulumi.set(self, "maintenance_window", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[int]]:
        """
        The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter
    def zone(self) -> Optional[pulumi.Input[str]]:
        """
        Zone where this node group is located
        """
        return pulumi.get(self, "zone")

    @zone.setter
    def zone(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone", value)
@pulumi.input_type
class _NodeGroupState:
    # State bag consumed by NodeGroup.get(): every field is optional so a
    # lookup can filter on any subset of properties. Unlike the create-time
    # args, this also carries the output-only fields (creation_timestamp,
    # self_link).
    def __init__(__self__, *,
                 autoscaling_policy: Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']] = None,
                 creation_timestamp: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 initial_size: Optional[pulumi.Input[int]] = None,
                 maintenance_policy: Optional[pulumi.Input[str]] = None,
                 maintenance_window: Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 node_template: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 self_link: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[int]] = None,
                 zone: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering NodeGroup resources.

        :param pulumi.Input['NodeGroupAutoscalingPolicyArgs'] autoscaling_policy: If you use sole-tenant nodes for your workloads, you can use the node
               group autoscaler to automatically manage the sizes of your node groups.
               Structure is documented below.
        :param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
        :param pulumi.Input[str] description: An optional textual description of the resource.
        :param pulumi.Input[int] initial_size: The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] maintenance_policy: Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        :param pulumi.Input['NodeGroupMaintenanceWindowArgs'] maintenance_window: contains properties for the timeframe of maintenance
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource.
        :param pulumi.Input[str] node_template: The URL of the node template to which this node group belongs.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] self_link: The URI of the created resource.
        :param pulumi.Input[int] size: The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] zone: Zone where this node group is located
        """
        # Only explicitly supplied properties are recorded, so unset fields
        # stay absent from the property bag rather than being stored as None.
        if autoscaling_policy is not None:
            pulumi.set(__self__, "autoscaling_policy", autoscaling_policy)
        if creation_timestamp is not None:
            pulumi.set(__self__, "creation_timestamp", creation_timestamp)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if initial_size is not None:
            pulumi.set(__self__, "initial_size", initial_size)
        if maintenance_policy is not None:
            pulumi.set(__self__, "maintenance_policy", maintenance_policy)
        if maintenance_window is not None:
            pulumi.set(__self__, "maintenance_window", maintenance_window)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if node_template is not None:
            pulumi.set(__self__, "node_template", node_template)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if self_link is not None:
            pulumi.set(__self__, "self_link", self_link)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if zone is not None:
            pulumi.set(__self__, "zone", zone)

    @property
    @pulumi.getter(name="autoscalingPolicy")
    def autoscaling_policy(self) -> Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']]:
        """
        If you use sole-tenant nodes for your workloads, you can use the node
        group autoscaler to automatically manage the sizes of your node groups.
        Structure is documented below.
        """
        return pulumi.get(self, "autoscaling_policy")

    @autoscaling_policy.setter
    def autoscaling_policy(self, value: Optional[pulumi.Input['NodeGroupAutoscalingPolicyArgs']]):
        pulumi.set(self, "autoscaling_policy", value)

    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> Optional[pulumi.Input[str]]:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")

    @creation_timestamp.setter
    def creation_timestamp(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "creation_timestamp", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional textual description of the resource.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="initialSize")
    def initial_size(self) -> Optional[pulumi.Input[int]]:
        """
        The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "initial_size")

    @initial_size.setter
    def initial_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "initial_size", value)

    @property
    @pulumi.getter(name="maintenancePolicy")
    def maintenance_policy(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        """
        return pulumi.get(self, "maintenance_policy")

    @maintenance_policy.setter
    def maintenance_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "maintenance_policy", value)

    @property
    @pulumi.getter(name="maintenanceWindow")
    def maintenance_window(self) -> Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']]:
        """
        Contains properties for the timeframe of maintenance.
        Structure is documented below.
        """
        return pulumi.get(self, "maintenance_window")

    @maintenance_window.setter
    def maintenance_window(self, value: Optional[pulumi.Input['NodeGroupMaintenanceWindowArgs']]):
        pulumi.set(self, "maintenance_window", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nodeTemplate")
    def node_template(self) -> Optional[pulumi.Input[str]]:
        """
        The URL of the node template to which this node group belongs.
        """
        return pulumi.get(self, "node_template")

    @node_template.setter
    def node_template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_template", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> Optional[pulumi.Input[str]]:
        """
        The URI of the created resource.
        """
        return pulumi.get(self, "self_link")

    @self_link.setter
    def self_link(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self_link", value)

    @property
    @pulumi.getter
    def size(self) -> Optional[pulumi.Input[int]]:
        """
        The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "size")

    @size.setter
    def size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "size", value)

    @property
    @pulumi.getter
    def zone(self) -> Optional[pulumi.Input[str]]:
        """
        Zone where this node group is located.
        """
        return pulumi.get(self, "zone")

    @zone.setter
    def zone(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone", value)
class NodeGroup(pulumi.CustomResource):
    """Manages a GCP Compute NodeGroup (a group of sole-tenant nodes).

    Instances are created through the overloaded ``__init__`` below, which
    accepts either keyword arguments or a single ``NodeGroupArgs`` object.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autoscaling_policy: Optional[pulumi.Input[pulumi.InputType['NodeGroupAutoscalingPolicyArgs']]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 initial_size: Optional[pulumi.Input[int]] = None,
                 maintenance_policy: Optional[pulumi.Input[str]] = None,
                 maintenance_window: Optional[pulumi.Input[pulumi.InputType['NodeGroupMaintenanceWindowArgs']]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 node_template: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 size: Optional[pulumi.Input[int]] = None,
                 zone: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Represents a NodeGroup resource to manage a group of sole-tenant nodes.

        To get more information about NodeGroup, see:

        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups)
        * How-to Guides
            * [Sole-Tenant Nodes](https://cloud.google.com/compute/docs/nodes/)

        > **Warning:** Due to limitations of the API, this provider cannot update the
        number of nodes in a node group and changes to node group size either
        through provider config or through external changes will cause
        the provider to delete and recreate the node group.

        ## Example Usage
        ### Node Group Basic

        ```python
        import pulumi
        import pulumi_gcp as gcp

        soletenant_tmpl = gcp.compute.NodeTemplate("soletenant-tmpl",
            region="us-central1",
            node_type="n1-node-96-624")
        nodes = gcp.compute.NodeGroup("nodes",
            zone="us-central1-a",
            description="example google_compute_node_group for the Google Provider",
            size=1,
            node_template=soletenant_tmpl.id)
        ```
        ### Node Group Autoscaling Policy

        ```python
        import pulumi
        import pulumi_gcp as gcp

        soletenant_tmpl = gcp.compute.NodeTemplate("soletenant-tmpl",
            region="us-central1",
            node_type="n1-node-96-624")
        nodes = gcp.compute.NodeGroup("nodes",
            zone="us-central1-a",
            description="example google_compute_node_group for Google Provider",
            maintenance_policy="RESTART_IN_PLACE",
            maintenance_window=gcp.compute.NodeGroupMaintenanceWindowArgs(
                start_time="08:00",
            ),
            initial_size=1,
            node_template=soletenant_tmpl.id,
            autoscaling_policy=gcp.compute.NodeGroupAutoscalingPolicyArgs(
                mode="ONLY_SCALE_OUT",
                min_nodes=1,
                max_nodes=10,
            ))
        ```

        ## Import

        NodeGroup can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default projects/{{project}}/zones/{{zone}}/nodeGroups/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{project}}/{{zone}}/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{zone}}/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{name}}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['NodeGroupAutoscalingPolicyArgs']] autoscaling_policy: If you use sole-tenant nodes for your workloads, you can use the node
               group autoscaler to automatically manage the sizes of your node groups.
               Structure is documented below.
        :param pulumi.Input[str] description: An optional textual description of the resource.
        :param pulumi.Input[int] initial_size: The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] maintenance_policy: Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        :param pulumi.Input[pulumi.InputType['NodeGroupMaintenanceWindowArgs']] maintenance_window: contains properties for the timeframe of maintenance
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource.
        :param pulumi.Input[str] node_template: The URL of the node template to which this node group belongs.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[int] size: The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] zone: Zone where this node group is located
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: NodeGroupArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Represents a NodeGroup resource to manage a group of sole-tenant nodes.

        To get more information about NodeGroup, see:

        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups)
        * How-to Guides
            * [Sole-Tenant Nodes](https://cloud.google.com/compute/docs/nodes/)

        > **Warning:** Due to limitations of the API, this provider cannot update the
        number of nodes in a node group and changes to node group size either
        through provider config or through external changes will cause
        the provider to delete and recreate the node group.

        ## Example Usage
        ### Node Group Basic

        ```python
        import pulumi
        import pulumi_gcp as gcp

        soletenant_tmpl = gcp.compute.NodeTemplate("soletenant-tmpl",
            region="us-central1",
            node_type="n1-node-96-624")
        nodes = gcp.compute.NodeGroup("nodes",
            zone="us-central1-a",
            description="example google_compute_node_group for the Google Provider",
            size=1,
            node_template=soletenant_tmpl.id)
        ```
        ### Node Group Autoscaling Policy

        ```python
        import pulumi
        import pulumi_gcp as gcp

        soletenant_tmpl = gcp.compute.NodeTemplate("soletenant-tmpl",
            region="us-central1",
            node_type="n1-node-96-624")
        nodes = gcp.compute.NodeGroup("nodes",
            zone="us-central1-a",
            description="example google_compute_node_group for Google Provider",
            maintenance_policy="RESTART_IN_PLACE",
            maintenance_window=gcp.compute.NodeGroupMaintenanceWindowArgs(
                start_time="08:00",
            ),
            initial_size=1,
            node_template=soletenant_tmpl.id,
            autoscaling_policy=gcp.compute.NodeGroupAutoscalingPolicyArgs(
                mode="ONLY_SCALE_OUT",
                min_nodes=1,
                max_nodes=10,
            ))
        ```

        ## Import

        NodeGroup can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default projects/{{project}}/zones/{{zone}}/nodeGroups/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{project}}/{{zone}}/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{zone}}/{{name}}
        ```

        ```sh
         $ pulumi import gcp:compute/nodeGroup:NodeGroup default {{name}}
        ```

        :param str resource_name: The name of the resource.
        :param NodeGroupArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: when the caller passed a
        # NodeGroupArgs object, unpack its fields into keyword arguments;
        # otherwise forward positional/keyword arguments as-is.
        resource_args, opts = _utilities.get_resource_args_opts(NodeGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       autoscaling_policy: Optional[pulumi.Input[pulumi.InputType['NodeGroupAutoscalingPolicyArgs']]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       initial_size: Optional[pulumi.Input[int]] = None,
                       maintenance_policy: Optional[pulumi.Input[str]] = None,
                       maintenance_window: Optional[pulumi.Input[pulumi.InputType['NodeGroupMaintenanceWindowArgs']]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       node_template: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       size: Optional[pulumi.Input[int]] = None,
                       zone: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body for both overloads. Normalizes opts, builds
        # the property bag, and registers the resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to adopting an existing one
            # via opts.id): __props__ must not be caller-supplied here.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = NodeGroupArgs.__new__(NodeGroupArgs)

            __props__.__dict__["autoscaling_policy"] = autoscaling_policy
            __props__.__dict__["description"] = description
            __props__.__dict__["initial_size"] = initial_size
            __props__.__dict__["maintenance_policy"] = maintenance_policy
            __props__.__dict__["maintenance_window"] = maintenance_window
            __props__.__dict__["name"] = name
            # node_template is the only required input (unless rehydrating
            # from an existing URN).
            if node_template is None and not opts.urn:
                raise TypeError("Missing required property 'node_template'")
            __props__.__dict__["node_template"] = node_template
            __props__.__dict__["project"] = project
            __props__.__dict__["size"] = size
            __props__.__dict__["zone"] = zone
            # Output-only properties start as None and are filled in by the
            # provider after creation.
            __props__.__dict__["creation_timestamp"] = None
            __props__.__dict__["self_link"] = None
        super(NodeGroup, __self__).__init__(
            'gcp:compute/nodeGroup:NodeGroup',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            autoscaling_policy: Optional[pulumi.Input[pulumi.InputType['NodeGroupAutoscalingPolicyArgs']]] = None,
            creation_timestamp: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            initial_size: Optional[pulumi.Input[int]] = None,
            maintenance_policy: Optional[pulumi.Input[str]] = None,
            maintenance_window: Optional[pulumi.Input[pulumi.InputType['NodeGroupMaintenanceWindowArgs']]] = None,
            name: Optional[pulumi.Input[str]] = None,
            node_template: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None,
            self_link: Optional[pulumi.Input[str]] = None,
            size: Optional[pulumi.Input[int]] = None,
            zone: Optional[pulumi.Input[str]] = None) -> 'NodeGroup':
        """
        Get an existing NodeGroup resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['NodeGroupAutoscalingPolicyArgs']] autoscaling_policy: If you use sole-tenant nodes for your workloads, you can use the node
               group autoscaler to automatically manage the sizes of your node groups.
               Structure is documented below.
        :param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
        :param pulumi.Input[str] description: An optional textual description of the resource.
        :param pulumi.Input[int] initial_size: The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] maintenance_policy: Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        :param pulumi.Input[pulumi.InputType['NodeGroupMaintenanceWindowArgs']] maintenance_window: contains properties for the timeframe of maintenance
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource.
        :param pulumi.Input[str] node_template: The URL of the node template to which this node group belongs.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] self_link: The URI of the created resource.
        :param pulumi.Input[int] size: The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        :param pulumi.Input[str] zone: Zone where this node group is located
        """
        # Merging id into opts makes the base constructor adopt the existing
        # resource instead of creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _NodeGroupState.__new__(_NodeGroupState)

        __props__.__dict__["autoscaling_policy"] = autoscaling_policy
        __props__.__dict__["creation_timestamp"] = creation_timestamp
        __props__.__dict__["description"] = description
        __props__.__dict__["initial_size"] = initial_size
        __props__.__dict__["maintenance_policy"] = maintenance_policy
        __props__.__dict__["maintenance_window"] = maintenance_window
        __props__.__dict__["name"] = name
        __props__.__dict__["node_template"] = node_template
        __props__.__dict__["project"] = project
        __props__.__dict__["self_link"] = self_link
        __props__.__dict__["size"] = size
        __props__.__dict__["zone"] = zone
        return NodeGroup(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="autoscalingPolicy")
    def autoscaling_policy(self) -> pulumi.Output['outputs.NodeGroupAutoscalingPolicy']:
        """
        If you use sole-tenant nodes for your workloads, you can use the node
        group autoscaler to automatically manage the sizes of your node groups.
        Structure is documented below.
        """
        return pulumi.get(self, "autoscaling_policy")

    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> pulumi.Output[str]:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        An optional textual description of the resource.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="initialSize")
    def initial_size(self) -> pulumi.Output[Optional[int]]:
        """
        The initial number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "initial_size")

    @property
    @pulumi.getter(name="maintenancePolicy")
    def maintenance_policy(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT.
        """
        return pulumi.get(self, "maintenance_policy")

    @property
    @pulumi.getter(name="maintenanceWindow")
    def maintenance_window(self) -> pulumi.Output[Optional['outputs.NodeGroupMaintenanceWindow']]:
        """
        Contains properties for the timeframe of maintenance.
        Structure is documented below.
        """
        return pulumi.get(self, "maintenance_window")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="nodeTemplate")
    def node_template(self) -> pulumi.Output[str]:
        """
        The URL of the node template to which this node group belongs.
        """
        return pulumi.get(self, "node_template")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> pulumi.Output[str]:
        """
        The URI of the created resource.
        """
        return pulumi.get(self, "self_link")

    @property
    @pulumi.getter
    def size(self) -> pulumi.Output[int]:
        """
        The total number of nodes in the node group. One of `initial_size` or `size` must be specified.
        """
        return pulumi.get(self, "size")

    @property
    @pulumi.getter
    def zone(self) -> pulumi.Output[str]:
        """
        Zone where this node group is located.
        """
        return pulumi.get(self, "zone")
| 43.86783
| 236
| 0.647547
| 4,056
| 35,182
| 5.439596
| 0.064349
| 0.072792
| 0.081811
| 0.058832
| 0.910257
| 0.89539
| 0.883017
| 0.876173
| 0.863527
| 0.838009
| 0
| 0.00263
| 0.25442
| 35,182
| 801
| 237
| 43.922597
| 0.838474
| 0.398044
| 0
| 0.796875
| 1
| 0
| 0.122079
| 0.034169
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164063
| false
| 0.002604
| 0.018229
| 0
| 0.28125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cace876d4f7a98fd6ce01eeef50003644fc57388
| 60,582
|
py
|
Python
|
esp_sdk/apis/custom_compliance_controls_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 6
|
2017-06-05T20:37:19.000Z
|
2019-04-10T08:43:59.000Z
|
esp_sdk/apis/custom_compliance_controls_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 18
|
2016-06-22T16:14:33.000Z
|
2018-10-29T21:53:15.000Z
|
esp_sdk/apis/custom_compliance_controls_api.py
|
zimmermanc/esp-sdk-python
|
cdef13c0dc6c3996b6c444160c71b2f1e3910c97
|
[
"MIT"
] | 18
|
2016-07-27T19:20:01.000Z
|
2020-11-17T02:09:58.000Z
|
# coding: utf-8
"""
ESP Documentation
The Evident Security Platform API (version 2.0) is designed to allow users granular control over their Amazon Web Service security experience by allowing them to review alerts, monitor signatures, and create custom signatures.
OpenAPI spec version: v2_sdk
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class CustomComplianceControlsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Remember which API client to use for all requests.

    An explicitly supplied *api_client* wins; otherwise the client held by
    the shared ``Configuration`` is used, created lazily on first need.
    """
    config = Configuration()
    if not api_client:
        # Fall back to the configuration's client, creating it on demand.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def add_custom_signature(self, custom_compliance_control_id, custom_signature_id, **kwargs):
"""
Add a Custom Signature to a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_custom_signature(custom_compliance_control_id, custom_signature_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
:param int custom_signature_id: The ID of the custom signature that belongs to this custom control (required)
:param str include: Related objects that can be included in the response: organization, teams, external_accounts, definitions, suppressions, service See Including Objects for more information.
:return: CustomSignature
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, **kwargs)
else:
(data) = self.add_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, **kwargs)
return data
def add_custom_signature_with_http_info(self, custom_compliance_control_id, custom_signature_id, **kwargs):
    """
    Add a Custom Signature to a Custom Compliance Control

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
    :param int custom_signature_id: The ID of the custom signature that belongs to this custom control (required)
    :param str include: Related objects that can be included in the response: organization, teams, external_accounts, definitions, suppressions, service See Including Objects for more information.
    :return: CustomSignature
        If the method is called asynchronously,
        returns the request thread.
    """
    # Parameters this endpoint accepts; anything else in kwargs is rejected.
    all_params = ['custom_compliance_control_id', 'custom_signature_id', 'include']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() captures the named arguments plus the kwargs dict; validated
    # kwargs entries are folded into `params` and the raw dict is dropped.
    # (Do NOT rename locals in this method: the names become dict keys.)
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_custom_signature" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'custom_compliance_control_id' is set
    if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
        raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `add_custom_signature`")
    # verify the required parameter 'custom_signature_id' is set
    if ('custom_signature_id' not in params) or (params['custom_signature_id'] is None):
        raise ValueError("Missing the required parameter `custom_signature_id` when calling `add_custom_signature`")

    collection_formats = {}

    # The generated `{format}` placeholder is pinned to json_api.
    resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/custom_signatures.json_api'.replace('{format}', 'json_api')
    path_params = {}
    if 'custom_compliance_control_id' in params:
        path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']

    query_params = {}
    if 'include' in params:
        query_params['include'] = params['include']

    header_params = {}

    form_params = []
    local_var_files = {}
    # The custom signature id travels as form data, not in the URL.
    if 'custom_signature_id' in params:
        form_params.append(('custom_signature_id', params['custom_signature_id']))

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/vnd.api+json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/vnd.api+json'])

    # Authentication setting (none required by this endpoint)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CustomSignature',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def add_signature(self, custom_compliance_control_id, signature_id, **kwargs):
"""
Add a Signature to a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_signature(custom_compliance_control_id, signature_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
:param int signature_id: The ID of the signature that belongs to this custom control (required)
:param str include: Related objects that can be included in the response: service, suppressions See Including Objects for more information.
:return: Signature
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_signature_with_http_info(custom_compliance_control_id, signature_id, **kwargs)
else:
(data) = self.add_signature_with_http_info(custom_compliance_control_id, signature_id, **kwargs)
return data
    def add_signature_with_http_info(self, custom_compliance_control_id, signature_id, **kwargs):
        """
        Add a Signature to a Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.add_signature_with_http_info(custom_compliance_control_id, signature_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
        :param int signature_id: The ID of the signature that belongs to this custom control (required)
        :param str include: Related objects that can be included in the response: service, suppressions See Including Objects for more information.
        :return: Signature
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['custom_compliance_control_id', 'signature_id', 'include']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_signature" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'custom_compliance_control_id' is set
        if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
            raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `add_signature`")
        # verify the required parameter 'signature_id' is set
        if ('signature_id' not in params) or (params['signature_id'] is None):
            raise ValueError("Missing the required parameter `signature_id` when calling `add_signature`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; the control ID placeholder is filled in
        # from path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/signatures.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'custom_compliance_control_id' in params:
            path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']
        query_params = {}
        if 'include' in params:
            query_params['include'] = params['include']
        header_params = {}
        form_params = []
        local_var_files = {}
        # The signature to attach travels as a form field in the POST body.
        if 'signature_id' in params:
            form_params.append(('signature_id', params['signature_id']))
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Signature',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def create(self, custom_compliance_domain_id, identifier, name, **kwargs):
"""
Create a(n) Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create(custom_compliance_domain_id, identifier, name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_domain_id: The ID of the Custom Compliance Domain this custom control belongs to (required)
:param str identifier: The identifier of this custom control (required)
:param str name: Name (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:param list[int] custom_signature_ids: An array of custom signatures identified by custom_signature_id that belong to this custom control
:param str description: The description for this custom control
:param int position: The position of this custom control within the custom domain
:param list[int] signature_ids: An array of signatures identified by signature_id that belong to this custom control
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_with_http_info(custom_compliance_domain_id, identifier, name, **kwargs)
else:
(data) = self.create_with_http_info(custom_compliance_domain_id, identifier, name, **kwargs)
return data
def create_with_http_info(self, custom_compliance_domain_id, identifier, name, **kwargs):
"""
Create a(n) Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_with_http_info(custom_compliance_domain_id, identifier, name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_domain_id: The ID of the Custom Compliance Domain this custom control belongs to (required)
:param str identifier: The identifier of this custom control (required)
:param str name: Name (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:param list[int] custom_signature_ids: An array of custom signatures identified by custom_signature_id that belong to this custom control
:param str description: The description for this custom control
:param int position: The position of this custom control within the custom domain
:param list[int] signature_ids: An array of signatures identified by signature_id that belong to this custom control
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['custom_compliance_domain_id', 'identifier', 'name', 'include', 'custom_signature_ids', 'description', 'position', 'signature_ids']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'custom_compliance_domain_id' is set
if ('custom_compliance_domain_id' not in params) or (params['custom_compliance_domain_id'] is None):
raise ValueError("Missing the required parameter `custom_compliance_domain_id` when calling `create`")
# verify the required parameter 'identifier' is set
if ('identifier' not in params) or (params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `create`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `create`")
collection_formats = {}
resource_path = '/api/v2/custom_compliance_controls.json_api'.replace('{format}', 'json_api')
path_params = {}
query_params = {}
if 'include' in params:
query_params['include'] = params['include']
header_params = {}
form_params = []
local_var_files = {}
if 'custom_compliance_domain_id' in params:
form_params.append(('custom_compliance_domain_id', params['custom_compliance_domain_id']))
if 'custom_signature_ids' in params:
form_params.append(('custom_signature_ids', params['custom_signature_ids']))
collection_formats['None'] = 'csv'
if 'description' in params:
form_params.append(('description', params['description']))
if 'identifier' in params:
form_params.append(('identifier', params['identifier']))
if 'name' in params:
form_params.append(('name', params['name']))
if 'position' in params:
form_params.append(('position', params['position']))
if 'signature_ids' in params:
form_params.append(('signature_ids', params['signature_ids']))
collection_formats['None'] = 'csv'
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CustomComplianceControl',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete(self, id, **kwargs):
"""
Delete a(n) Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Custom Compliance Control ID (required)
:return: Meta
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_with_http_info(id, **kwargs)
else:
(data) = self.delete_with_http_info(id, **kwargs)
return data
    def delete_with_http_info(self, id, **kwargs):
        """
        Delete a(n) Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.delete_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Custom Compliance Control ID (required)
        :return: Meta
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; {id} is filled from path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{id}.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Meta',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_custom_signatures(self, custom_compliance_control_id, **kwargs):
"""
Get a list of Custom Signatures for a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_custom_signatures(custom_compliance_control_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
:param str include: Related objects that can be included in the response: organization, teams, external_accounts, definitions, suppressions, service See Including Objects for more information.
:param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
:return: PaginatedCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_custom_signatures_with_http_info(custom_compliance_control_id, **kwargs)
else:
(data) = self.list_custom_signatures_with_http_info(custom_compliance_control_id, **kwargs)
return data
    def list_custom_signatures_with_http_info(self, custom_compliance_control_id, **kwargs):
        """
        Get a list of Custom Signatures for a Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_custom_signatures_with_http_info(custom_compliance_control_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
        :param str include: Related objects that can be included in the response: organization, teams, external_accounts, definitions, suppressions, service See Including Objects for more information.
        :param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
        :return: PaginatedCollection
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['custom_compliance_control_id', 'include', 'page']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_custom_signatures" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'custom_compliance_control_id' is set
        if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
            raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `list_custom_signatures`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; the control ID placeholder is filled in
        # from path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/custom_signatures.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'custom_compliance_control_id' in params:
            path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']
        query_params = {}
        if 'include' in params:
            query_params['include'] = params['include']
        header_params = {}
        form_params = []
        local_var_files = {}
        # NOTE(review): 'page' is appended to form_params even though this
        # is a GET request; generated code -- it likely belongs in
        # query_params. Confirm against the server's pagination handling.
        if 'page' in params:
            form_params.append(('page', params['page']))
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='PaginatedCollection',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_signatures(self, custom_compliance_control_id, **kwargs):
"""
Get a list of Signatures for a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_signatures(custom_compliance_control_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
:param str include: Related objects that can be included in the response: service, suppressions See Including Objects for more information.
:param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
:return: PaginatedCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_signatures_with_http_info(custom_compliance_control_id, **kwargs)
else:
(data) = self.list_signatures_with_http_info(custom_compliance_control_id, **kwargs)
return data
    def list_signatures_with_http_info(self, custom_compliance_control_id, **kwargs):
        """
        Get a list of Signatures for a Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_signatures_with_http_info(custom_compliance_control_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
        :param str include: Related objects that can be included in the response: service, suppressions See Including Objects for more information.
        :param str page: Page Number and Page Size. Number is the page number of the collection to return, size is the number of items to return per page.
        :return: PaginatedCollection
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['custom_compliance_control_id', 'include', 'page']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_signatures" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'custom_compliance_control_id' is set
        if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
            raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `list_signatures`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; the control ID placeholder is filled in
        # from path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/signatures.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'custom_compliance_control_id' in params:
            path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']
        query_params = {}
        if 'include' in params:
            query_params['include'] = params['include']
        header_params = {}
        form_params = []
        local_var_files = {}
        # NOTE(review): 'page' is appended to form_params even though this
        # is a GET request; generated code -- it likely belongs in
        # query_params. Confirm against the server's pagination handling.
        if 'page' in params:
            form_params.append(('page', params['page']))
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='PaginatedCollection',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def remove_custom_signature(self, custom_compliance_control_id, custom_signature_id, **kwargs):
"""
Remove a Custom Signature from a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_custom_signature(custom_compliance_control_id, custom_signature_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
:param int custom_signature_id: The ID of the custom signature that belongs to this custom control (required)
:return: Meta
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.remove_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, **kwargs)
else:
(data) = self.remove_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, **kwargs)
return data
    def remove_custom_signature_with_http_info(self, custom_compliance_control_id, custom_signature_id, **kwargs):
        """
        Remove a Custom Signature from a Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.remove_custom_signature_with_http_info(custom_compliance_control_id, custom_signature_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int custom_compliance_control_id: The ID of the Custom Compliance Control this custom signature belongs to (required)
        :param int custom_signature_id: The ID of the custom signature that belongs to this custom control (required)
        :return: Meta
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['custom_compliance_control_id', 'custom_signature_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method remove_custom_signature" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'custom_compliance_control_id' is set
        if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
            raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `remove_custom_signature`")
        # verify the required parameter 'custom_signature_id' is set
        if ('custom_signature_id' not in params) or (params['custom_signature_id'] is None):
            raise ValueError("Missing the required parameter `custom_signature_id` when calling `remove_custom_signature`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; both ID placeholders are filled in from
        # path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/custom_signatures/{custom_signature_id}.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'custom_compliance_control_id' in params:
            path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']
        if 'custom_signature_id' in params:
            path_params['custom_signature_id'] = params['custom_signature_id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Meta',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def remove_signature(self, custom_compliance_control_id, signature_id, **kwargs):
"""
Remove a Signature from a Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_signature(custom_compliance_control_id, signature_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
:param int signature_id: The ID of the signature that belongs to this custom control (required)
:return: Meta
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.remove_signature_with_http_info(custom_compliance_control_id, signature_id, **kwargs)
else:
(data) = self.remove_signature_with_http_info(custom_compliance_control_id, signature_id, **kwargs)
return data
    def remove_signature_with_http_info(self, custom_compliance_control_id, signature_id, **kwargs):
        """
        Remove a Signature from a Custom Compliance Control
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.remove_signature_with_http_info(custom_compliance_control_id, signature_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int custom_compliance_control_id: The ID of the Custom Compliance Control this signature belongs to (required)
        :param int signature_id: The ID of the signature that belongs to this custom control (required)
        :return: Meta
            If the method is called asynchronously,
            returns the request thread.
        """
        # Every keyword this endpoint understands; anything else is a
        # caller error and raises TypeError below.
        all_params = ['custom_compliance_control_id', 'signature_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures the named arguments plus 'kwargs'; kwargs are
        # validated against all_params and merged into the same dict.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method remove_signature" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'custom_compliance_control_id' is set
        if ('custom_compliance_control_id' not in params) or (params['custom_compliance_control_id'] is None):
            raise ValueError("Missing the required parameter `custom_compliance_control_id` when calling `remove_signature`")
        # verify the required parameter 'signature_id' is set
        if ('signature_id' not in params) or (params['signature_id'] is None):
            raise ValueError("Missing the required parameter `signature_id` when calling `remove_signature`")
        collection_formats = {}
        # '{format}' does not occur in this path, so the replace() is a
        # generated-code no-op; both ID placeholders are filled in from
        # path_params by call_api.
        resource_path = '/api/v2/custom_compliance_controls/{custom_compliance_control_id}/signatures/{signature_id}.json_api'.replace('{format}', 'json_api')
        path_params = {}
        if 'custom_compliance_control_id' in params:
            path_params['custom_compliance_control_id'] = params['custom_compliance_control_id']
        if 'signature_id' in params:
            path_params['signature_id'] = params['signature_id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/vnd.api+json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/vnd.api+json'])
        # Authentication setting
        # NOTE(review): no per-endpoint auth schemes listed; credentials are
        # presumably applied globally by api_client -- confirm.
        auth_settings = []
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Meta',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def show(self, id, **kwargs):
"""
Show a single Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Custom Compliance Control ID (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.show_with_http_info(id, **kwargs)
else:
(data) = self.show_with_http_info(id, **kwargs)
return data
def show_with_http_info(self, id, **kwargs):
"""
Show a single Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Custom Compliance Control ID (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'include']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method show" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `show`")
collection_formats = {}
resource_path = '/api/v2/custom_compliance_controls/{id}.json_api'.replace('{format}', 'json_api')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'include' in params:
query_params['include'] = params['include']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CustomComplianceControl',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update(self, id, **kwargs):
"""
Update a(n) Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Custom Compliance Control ID (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:param int custom_compliance_domain_id: The ID of the Custom Compliance Domain this custom control belongs to
:param list[int] custom_signature_ids: An array of custom signatures identified by custom_signature_id that belong to this custom control
:param str description: The description for this custom control
:param str identifier: The identifier of this custom control
:param str name: Name
:param int position: The position of this custom control within the custom domain
:param list[int] signature_ids: An array of signatures identified by signature_id that belong to this custom control
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_with_http_info(id, **kwargs)
else:
(data) = self.update_with_http_info(id, **kwargs)
return data
def update_with_http_info(self, id, **kwargs):
"""
Update a(n) Custom Compliance Control
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Custom Compliance Control ID (required)
:param str include: Related objects that can be included in the response: custom_compliance_standard, custom_compliance_domain, signatures, custom_signatures See Including Objects for more information.
:param int custom_compliance_domain_id: The ID of the Custom Compliance Domain this custom control belongs to
:param list[int] custom_signature_ids: An array of custom signatures identified by custom_signature_id that belong to this custom control
:param str description: The description for this custom control
:param str identifier: The identifier of this custom control
:param str name: Name
:param int position: The position of this custom control within the custom domain
:param list[int] signature_ids: An array of signatures identified by signature_id that belong to this custom control
:return: CustomComplianceControl
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'include', 'custom_compliance_domain_id', 'custom_signature_ids', 'description', 'identifier', 'name', 'position', 'signature_ids']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update`")
collection_formats = {}
resource_path = '/api/v2/custom_compliance_controls/{id}.json_api'.replace('{format}', 'json_api')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'include' in params:
query_params['include'] = params['include']
header_params = {}
form_params = []
local_var_files = {}
if 'custom_compliance_domain_id' in params:
form_params.append(('custom_compliance_domain_id', params['custom_compliance_domain_id']))
if 'custom_signature_ids' in params:
form_params.append(('custom_signature_ids', params['custom_signature_ids']))
collection_formats['None'] = 'csv'
if 'description' in params:
form_params.append(('description', params['description']))
if 'identifier' in params:
form_params.append(('identifier', params['identifier']))
if 'name' in params:
form_params.append(('name', params['name']))
if 'position' in params:
form_params.append(('position', params['position']))
if 'signature_ids' in params:
form_params.append(('signature_ids', params['signature_ids']))
collection_formats['None'] = 'csv'
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CustomComplianceControl',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 48.93538
| 230
| 0.615133
| 6,511
| 60,582
| 5.480418
| 0.034403
| 0.084298
| 0.090239
| 0.075666
| 0.971583
| 0.967884
| 0.962811
| 0.952835
| 0.942606
| 0.940476
| 0
| 0.000382
| 0.309432
| 60,582
| 1,237
| 231
| 48.974939
| 0.852543
| 0.365439
| 0
| 0.81759
| 0
| 0
| 0.222175
| 0.103778
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034202
| false
| 0
| 0.011401
| 0
| 0.096091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
caec440d4d0cc71b65ff3dbff33f1cab35d337db
| 167
|
py
|
Python
|
tests/parser/grounding.backjump.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.backjump.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.backjump.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
num(2).
node(a).
p(N) :- num(N), #count{Y:node(Y)} = N1, N<=N1.
"""
output = """
num(2).
node(a).
p(N) :- num(N), #count{Y:node(Y)} = N1, N<=N1.
"""
| 15.181818
| 47
| 0.437126
| 32
| 167
| 2.28125
| 0.34375
| 0.109589
| 0.219178
| 0.246575
| 0.849315
| 0.849315
| 0.849315
| 0.849315
| 0.849315
| 0.849315
| 0
| 0.044444
| 0.191617
| 167
| 10
| 48
| 16.7
| 0.496296
| 0
| 0
| 0.8
| 0
| 0.2
| 0.807453
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
1b0ff6726fa4e15c96ef698253008513cd2625f3
| 270,684
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR subscriber\-infra\-tmplmgr package configuration.
This module contains definitions
for the following management objects\:
dynamic\-template\: All dynamic template configurations
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class DynamicTemplate(object):
"""
All dynamic template configurations
.. attribute:: ip_subscribers
The IP Subscriber Template Table
**type**\: :py:class:`IpSubscribers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers>`
.. attribute:: ppps
Templates of the PPP Type
**type**\: :py:class:`Ppps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps>`
.. attribute:: subscriber_services
The Service Type Template Table
**type**\: :py:class:`SubscriberServices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices>`
"""
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'
def __init__(self):
self.ip_subscribers = DynamicTemplate.IpSubscribers()
self.ip_subscribers.parent = self
self.ppps = DynamicTemplate.Ppps()
self.ppps.parent = self
self.subscriber_services = DynamicTemplate.SubscriberServices()
self.subscriber_services.parent = self
class Ppps(object):
"""
Templates of the PPP Type
.. attribute:: ppp
A Template of the PPP Type
**type**\: list of :py:class:`Ppp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp>`
"""
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.ppp = YList()
self.ppp.parent = self
self.ppp.name = 'ppp'
class Ppp(object):
"""
A Template of the PPP Type
.. attribute:: template_name <key>
The name of the template
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: dhcpv6
Interface dhcpv6 configuration data
**type**\: :py:class:`Dhcpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Dhcpv6>`
.. attribute:: igmp
IGMPconfiguration
**type**\: :py:class:`Igmp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Igmp>`
.. attribute:: ipv4_network
Interface IPv4 Network configuration data
**type**\: :py:class:`Ipv4Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv4Network>`
.. attribute:: ipv4_packet_filter
IPv4 Packet Filtering configuration for the template
**type**\: :py:class:`Ipv4PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter>`
.. attribute:: ipv6_neighbor
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv6Neighbor>`
.. attribute:: ipv6_network
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv6Network>`
.. attribute:: ipv6_packet_filter
IPv6 Packet Filtering configuration for the interface
**type**\: :py:class:`Ipv6PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter>`
.. attribute:: pbr
Dynamic Template PBR configuration
**type**\: :py:class:`Pbr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Pbr>`
.. attribute:: qos
QoS dynamically applied configuration template
**type**\: :py:class:`Qos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Qos>`
.. attribute:: span_monitor_sessions
Monitor Session container for this template
**type**\: :py:class:`SpanMonitorSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.SpanMonitorSessions>`
.. attribute:: vrf
Assign the interface to a VRF
**type**\: str
**length:** 0..32
"""
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.template_name = None
self.dhcpv6 = DynamicTemplate.Ppps.Ppp.Dhcpv6()
self.dhcpv6.parent = self
self.igmp = DynamicTemplate.Ppps.Ppp.Igmp()
self.igmp.parent = self
self.ipv4_network = DynamicTemplate.Ppps.Ppp.Ipv4Network()
self.ipv4_network.parent = self
self.ipv4_packet_filter = DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter()
self.ipv4_packet_filter.parent = self
self.ipv6_neighbor = DynamicTemplate.Ppps.Ppp.Ipv6Neighbor()
self.ipv6_neighbor.parent = self
self.ipv6_network = DynamicTemplate.Ppps.Ppp.Ipv6Network()
self.ipv6_network.parent = self
self.ipv6_packet_filter = DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter()
self.ipv6_packet_filter.parent = self
self.pbr = DynamicTemplate.Ppps.Ppp.Pbr()
self.pbr.parent = self
self.qos = DynamicTemplate.Ppps.Ppp.Qos()
self.qos.parent = self
self.span_monitor_sessions = DynamicTemplate.Ppps.Ppp.SpanMonitorSessions()
self.span_monitor_sessions.parent = self
self.vrf = None
class SpanMonitorSessions(object):
"""
Monitor Session container for this template
.. attribute:: span_monitor_session
Configuration for a particular class of Monitor Session
**type**\: list of :py:class:`SpanMonitorSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.SpanMonitorSessions.SpanMonitorSession>`
"""
_prefix = 'ethernet-span-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.span_monitor_session = YList()
self.span_monitor_session.parent = self
self.span_monitor_session.name = 'span_monitor_session'
class SpanMonitorSession(object):
"""
Configuration for a particular class of Monitor
Session
.. attribute:: session_class <key>
Session Class
**type**\: :py:class:`SpanSessionClassEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_datatypes.SpanSessionClassEnum>`
.. attribute:: acl
Enable ACL matching for traffic mirroring
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: attachment
Attach the interface to a Monitor Session
**type**\: :py:class:`Attachment <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.SpanMonitorSessions.SpanMonitorSession.Attachment>`
**presence node**\: True
.. attribute:: mirror_first
Mirror a specified number of bytes from start of packet
**type**\: int
**range:** 1..10000
**units**\: byte
.. attribute:: mirror_interval
Specify the mirror interval
**type**\: :py:class:`SpanMirrorIntervalEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_subscriber_cfg.SpanMirrorIntervalEnum>`
"""
_prefix = 'ethernet-span-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.session_class = None
self.acl = None
self.attachment = None
self.mirror_first = None
self.mirror_interval = None
class Attachment(object):
"""
Attach the interface to a Monitor Session
.. attribute:: direction
Specify the direction of traffic to replicate (optional)
**type**\: :py:class:`SpanTrafficDirectionEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_subscriber_cfg.SpanTrafficDirectionEnum>`
.. attribute:: port_level_enable
Enable port level traffic mirroring
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: session_name
Session Name
**type**\: str
**length:** 0..79
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ethernet-span-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.direction = None
self.port_level_enable = None
self.session_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:attachment'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.direction is not None:
return True
if self.port_level_enable is not None:
return True
if self.session_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.SpanMonitorSessions.SpanMonitorSession.Attachment']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.session_class is None:
raise YPYModelError('Key property session_class is None')
return self.parent._common_path +'/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-session[Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:session-class = ' + str(self.session_class) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.session_class is not None:
return True
if self.acl is not None:
return True
if self.attachment is not None and self.attachment._has_data():
return True
if self.mirror_first is not None:
return True
if self.mirror_interval is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.SpanMonitorSessions.SpanMonitorSession']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.span_monitor_session is not None:
for child_ref in self.span_monitor_session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.SpanMonitorSessions']['meta_info']
class Ipv4PacketFilter(object):
"""
IPv4 Packet Filtering configuration for the
template
.. attribute:: inbound
IPv4 Packet filter to be applied to inbound packets
**type**\: :py:class:`Inbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter.Inbound>`
.. attribute:: outbound
IPv4 Packet filter to be applied to outbound packets
**type**\: :py:class:`Outbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter.Outbound>`
**presence node**\: True
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.inbound = DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter.Inbound()
self.inbound.parent = self
self.outbound = None
class Outbound(object):
"""
IPv4 Packet filter to be applied to outbound
packets
.. attribute:: common_acl_name
Not supported (Leave unspecified)
**type**\: str
.. attribute:: hardware_count
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: interface_statistics
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: name
IPv4 Packet Filter Name to be applied to Outbound packets
**type**\: str
**length:** 0..65
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.common_acl_name = None
self.hardware_count = None
self.interface_statistics = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.common_acl_name is not None:
return True
if self.hardware_count is not None:
return True
if self.interface_statistics is not None:
return True
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter.Outbound']['meta_info']
class Inbound(object):
"""
IPv4 Packet filter to be applied to inbound
packets
.. attribute:: common_acl_name
Not supported (Leave unspecified)
**type**\: str
.. attribute:: hardware_count
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: interface_statistics
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: name
IPv4 Packet Filter Name to be applied to Inbound packets NOTE\: This parameter is mandatory if 'CommonACLName' is not specified
**type**\: str
**length:** 0..65
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.common_acl_name = None
self.hardware_count = None
self.interface_statistics = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.common_acl_name is not None:
return True
if self.hardware_count is not None:
return True
if self.interface_statistics is not None:
return True
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter.Inbound']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv4-packet-filter'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.inbound is not None and self.inbound._has_data():
return True
if self.outbound is not None and self.outbound._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv4PacketFilter']['meta_info']
class Ipv6PacketFilter(object):
"""
IPv6 Packet Filtering configuration for the
interface
.. attribute:: inbound
IPv6 Packet filter to be applied to inbound packets
**type**\: :py:class:`Inbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter.Inbound>`
.. attribute:: outbound
IPv6 Packet filter to be applied to outbound packets
**type**\: :py:class:`Outbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter.Outbound>`
**presence node**\: True
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.inbound = DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter.Inbound()
self.inbound.parent = self
self.outbound = None
class Inbound(object):
"""
IPv6 Packet filter to be applied to inbound
packets
.. attribute:: common_acl_name
Not supported (Leave unspecified)
**type**\: str
.. attribute:: interface_statistics
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: name
IPv6 Packet Filter Name to be applied to Inbound NOTE\: This parameter is mandatory if 'CommonACLName' is not specified
**type**\: str
**length:** 0..65
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.common_acl_name = None
self.interface_statistics = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.common_acl_name is not None:
return True
if self.interface_statistics is not None:
return True
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter.Inbound']['meta_info']
class Outbound(object):
"""
IPv6 Packet filter to be applied to outbound
packets
.. attribute:: common_acl_name
Not supported (Leave unspecified)
**type**\: str
.. attribute:: interface_statistics
Not supported (Leave unspecified)
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: name
IPv6 Packet Filter Name to be applied to Outbound packets
**type**\: str
**length:** 0..65
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.common_acl_name = None
self.interface_statistics = None
self.name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.common_acl_name is not None:
return True
if self.interface_statistics is not None:
return True
if self.name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter.Outbound']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv6-packet-filter'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.inbound is not None and self.inbound._has_data():
return True
if self.outbound is not None and self.outbound._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
return meta._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6PacketFilter']['meta_info']
class Igmp(object):
    """
    IGMP configuration applied through the PPP dynamic template.

    .. attribute:: default_vrf
        Default VRF IGMP settings
        (DynamicTemplate.Ppps.Ppp.Igmp.DefaultVrf)
    """

    # YANG module prefix / revision this container was generated from.
    _prefix = 'ipv4-igmp-dyn-tmpl-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.default_vrf = DynamicTemplate.Ppps.Ppp.Igmp.DefaultVrf()
        self.default_vrf.parent = self

    class DefaultVrf(object):
        """
        Default VRF IGMP settings.

        Leaves: access_group (str), max_groups (int, 1..40000, default 25000),
        multicast (int), query_interval (int, 1..3600 s, default 60),
        query_max_response_time (int, 1..12 s, default 10),
        robustness (int, 2..10, default 2), version (int, 1..3, default 3).
        Child: explicit_tracking (presence container).
        """

        _prefix = 'ipv4-igmp-dyn-tmpl-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.access_group = None
            self.explicit_tracking = None
            self.max_groups = None
            self.multicast = None
            self.query_interval = None
            self.query_max_response_time = None
            self.robustness = None
            self.version = None

        class ExplicitTracking(object):
            """
            IGMPv3 explicit host tracking (presence container).

            Leaves: access_list_name (str), enable (bool, mandatory).
            """

            _prefix = 'ipv4-igmp-dyn-tmpl-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self._is_presence = True
                self.access_list_name = None
                self.enable = None

            @property
            def _common_path(self):
                # XPath is derived from the parent container's path.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg:explicit-tracking'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                # A presence container carries data merely by existing.
                if self._is_presence:
                    return True
                return any(leaf is not None for leaf in (self.access_list_name, self.enable))

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
                return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Igmp.DefaultVrf.ExplicitTracking']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg:default-vrf'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.explicit_tracking is not None and self.explicit_tracking._has_data():
                return True
            leaves = (self.access_group, self.max_groups, self.multicast,
                      self.query_interval, self.query_max_response_time,
                      self.robustness, self.version)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Igmp.DefaultVrf']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv4-igmp-dyn-tmpl-cfg:igmp'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return self.default_vrf is not None and self.default_vrf._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Igmp']['meta_info']
class Ipv4Network(object):
    """
    Interface IPv4 Network configuration data.

    Leaves: mtu (int, 68..65535 bytes), rpf (bool, default true),
    unnumbered (str, interface for unnumbered IP processing),
    unreachables (bool, default false).
    """

    _prefix = 'ipv4-ma-subscriber-cfg'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        self.mtu = None
        self.rpf = None
        self.unnumbered = None
        self.unreachables = None

    @property
    def _common_path(self):
        # XPath is derived from the parent container's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv4-ma-subscriber-cfg:ipv4-network'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        leaves = (self.mtu, self.rpf, self.unnumbered, self.unreachables)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv4Network']['meta_info']
class Ipv6Network(object):
    """
    Interface IPv6 Network configuration data.

    Leaves: mtu (int, 1280..65535 bytes), unreachables (Empty),
    verify (Ipv6ReachableViaEnum). Child: addresses.
    """

    _prefix = 'ipv6-ma-subscriber-cfg'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        self.addresses = DynamicTemplate.Ppps.Ppp.Ipv6Network.Addresses()
        self.addresses.parent = self
        self.mtu = None
        self.unreachables = None
        self.verify = None

    class Addresses(object):
        """Set the IPv6 address of an interface."""

        _prefix = 'ipv6-ma-subscriber-cfg'
        _revision = '2015-07-30'

        def __init__(self):
            self.parent = None
            self.auto_configuration = DynamicTemplate.Ppps.Ppp.Ipv6Network.Addresses.AutoConfiguration()
            self.auto_configuration.parent = self

        class AutoConfiguration(object):
            """Auto IPv6 interface configuration; enable (Empty) turns it on."""

            _prefix = 'ipv6-ma-subscriber-cfg'
            _revision = '2015-07-30'

            def __init__(self):
                self.parent = None
                self.enable = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:auto-configuration'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return self.enable is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
                return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Network.Addresses.AutoConfiguration']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:addresses'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.auto_configuration is not None and self.auto_configuration._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Network.Addresses']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:ipv6-network'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.addresses is not None and self.addresses._has_data():
            return True
        return any(leaf is not None for leaf in (self.mtu, self.unreachables, self.verify))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Network']['meta_info']
class Ipv6Neighbor(object):
    """
    Interface IPv6 Neighbor Discovery configuration data.

    Leaves: framed_prefix_pool (str), managed_config (Empty),
    ns_interval (int, 1000..3600000 ms), nud_enable (Empty),
    other_config (Empty), ra_hop_limit (enum), ra_interval (int, 3..1800 s),
    ra_lifetime (int, 0..9000 s), ra_suppress (Empty), ra_suppress_mtu (Empty),
    ra_unicast (Empty), reachable_time (int, 0..3600000 ms),
    router_preference (enum), start_ra_on_ipv6_enable (Empty),
    suppress_cache_learning (Empty).
    Children: duplicate_address_detection; framed_prefix (presence);
    ra_initial (presence).
    """

    _prefix = 'ipv6-nd-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.duplicate_address_detection = DynamicTemplate.Ppps.Ppp.Ipv6Neighbor.DuplicateAddressDetection()
        self.duplicate_address_detection.parent = self
        self.framed_prefix = None
        self.framed_prefix_pool = None
        self.managed_config = None
        self.ns_interval = None
        self.nud_enable = None
        self.other_config = None
        self.ra_hop_limit = None
        self.ra_initial = None
        self.ra_interval = None
        self.ra_lifetime = None
        self.ra_suppress = None
        self.ra_suppress_mtu = None
        self.ra_unicast = None
        self.reachable_time = None
        self.router_preference = None
        self.start_ra_on_ipv6_enable = None
        self.suppress_cache_learning = None

    class FramedPrefix(object):
        """
        IPv6 framed prefix for a subscriber interface (presence container).

        Leaves: prefix (str, mandatory), prefix_length (int, 0..128, mandatory).
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.prefix = None
            self.prefix_length = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:framed-prefix'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # Presence container: existing at all counts as data.
            if self._is_presence:
                return True
            return any(leaf is not None for leaf in (self.prefix, self.prefix_length))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Neighbor.FramedPrefix']['meta_info']

    class DuplicateAddressDetection(object):
        """
        Duplicate Address Detection (DAD).

        Leaves: attempts (int, 0..600) — number of DAD transmits.
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.attempts = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:duplicate-address-detection'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.attempts is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Neighbor.DuplicateAddressDetection']['meta_info']

    class RaInitial(object):
        """
        IPv6 ND RA Initial (presence container).

        Leaves: count (int, 0..32, mandatory),
        interval (int, 4..1800 s, mandatory).
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.count = None
            self.interval = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ra-initial'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # Presence container: existing at all counts as data.
            if self._is_presence:
                return True
            return any(leaf is not None for leaf in (self.count, self.interval))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Neighbor.RaInitial']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ipv6-neighbor'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Child containers first, then every simple leaf.
        children = (self.duplicate_address_detection, self.framed_prefix, self.ra_initial)
        if any(child is not None and child._has_data() for child in children):
            return True
        leaves = (self.framed_prefix_pool, self.managed_config, self.ns_interval,
                  self.nud_enable, self.other_config, self.ra_hop_limit,
                  self.ra_interval, self.ra_lifetime, self.ra_suppress,
                  self.ra_suppress_mtu, self.ra_unicast, self.reachable_time,
                  self.router_preference, self.start_ra_on_ipv6_enable,
                  self.suppress_cache_learning)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Ipv6Neighbor']['meta_info']
class Dhcpv6(object):
    """
    Interface DHCPv6 configuration data.

    Leaves: address_pool (str), class_ (str, proxy/server profile class),
    delegated_prefix_pool (str), dns_ipv6address (str, IPv6 address),
    stateful_address (str, IPv6 address).
    Child: delegated_prefix (presence container).
    """

    _prefix = 'ipv6-new-dhcpv6d-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.address_pool = None
        self.class_ = None
        self.delegated_prefix = None
        self.delegated_prefix_pool = None
        self.dns_ipv6address = None
        self.stateful_address = None

    class DelegatedPrefix(object):
        """
        Prefix used for DHCPv6 Prefix Delegation (presence container).

        Leaves: prefix (str, IPv6 prefix, mandatory),
        prefix_length (int, 0..128, mandatory).
        """

        _prefix = 'ipv6-new-dhcpv6d-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.prefix = None
            self.prefix_length = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg:delegated-prefix'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # Presence container: existing at all counts as data.
            if self._is_presence:
                return True
            return any(leaf is not None for leaf in (self.prefix, self.prefix_length))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Dhcpv6.DelegatedPrefix']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg:dhcpv6'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.delegated_prefix is not None and self.delegated_prefix._has_data():
            return True
        leaves = (self.address_pool, self.class_, self.delegated_prefix_pool,
                  self.dns_ipv6address, self.stateful_address)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Dhcpv6']['meta_info']
class Pbr(object):
    """
    Dynamic Template PBR configuration.

    Leaves: service_policy_in (str, subscriber ingress policy class).
    Child: service_policy.
    """

    _prefix = 'pbr-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.service_policy = DynamicTemplate.Ppps.Ppp.Pbr.ServicePolicy()
        self.service_policy.parent = self
        self.service_policy_in = None

    class ServicePolicy(object):
        """PBR service policy configuration; input (str) names the ingress policy."""

        _prefix = 'pbr-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.input = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-pbr-subscriber-cfg:service-policy'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.input is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Pbr.ServicePolicy']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-pbr-subscriber-cfg:pbr'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.service_policy is not None and self.service_policy._has_data():
            return True
        return self.service_policy_in is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Pbr']['meta_info']
class Qos(object):
    """
    QoS dynamically applied configuration template.

    Children: account (L2 overhead accounting), output (egress QoS),
    service_policy (ingress/egress service policies).
    """

    _prefix = 'qos-ma-bng-cfg'
    _revision = '2016-04-01'

    def __init__(self):
        self.parent = None
        self.account = DynamicTemplate.Ppps.Ppp.Qos.Account()
        self.account.parent = self
        self.output = DynamicTemplate.Ppps.Ppp.Qos.Output()
        self.output.parent = self
        self.service_policy = DynamicTemplate.Ppps.Ppp.Qos.ServicePolicy()
        self.service_policy.parent = self

    class ServicePolicy(object):
        """
        Service policy to be applied in ingress/egress direction.

        Children: input (presence), output (presence).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.input = None
            self.output = None

        class Input(object):
            """
            Subscriber ingress policy (presence container).

            Leaves: account_stats (bool; not supported for subscriber types
            'ppp' and 'ipsubscriber'), merge (bool), merge_id (int, 0..255),
            policy_name (str, mandatory), spi_name (str).
            """

            _prefix = 'qos-ma-bng-cfg'
            _revision = '2016-04-01'

            def __init__(self):
                self.parent = None
                self._is_presence = True
                self.account_stats = None
                self.merge = None
                self.merge_id = None
                self.policy_name = None
                self.spi_name = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:input'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                # Presence container: existing at all counts as data.
                if self._is_presence:
                    return True
                leaves = (self.account_stats, self.merge, self.merge_id,
                          self.policy_name, self.spi_name)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
                return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos.ServicePolicy.Input']['meta_info']

        class Output(object):
            """
            Subscriber egress policy (presence container).

            Leaves: account_stats (bool; not supported for subscriber types
            'ppp' and 'ipsubscriber'), merge (bool), merge_id (int, 0..255),
            policy_name (str, mandatory), spi_name (str).
            """

            _prefix = 'qos-ma-bng-cfg'
            _revision = '2016-04-01'

            def __init__(self):
                self.parent = None
                self._is_presence = True
                self.account_stats = None
                self.merge = None
                self.merge_id = None
                self.policy_name = None
                self.spi_name = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:output'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                # Presence container: existing at all counts as data.
                if self._is_presence:
                    return True
                leaves = (self.account_stats, self.merge, self.merge_id,
                          self.policy_name, self.spi_name)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
                return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos.ServicePolicy.Output']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:service-policy'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            directions = (self.input, self.output)
            return any(child is not None and child._has_data() for child in directions)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos.ServicePolicy']['meta_info']

    class Account(object):
        """
        QoS L2 overhead accounting.

        Leaves: aal (Qosl2DataLinkEnum), atm_cell_tax (Empty),
        encapsulation (Qosl2EncapEnum), user_defined (int, -63..63).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.aal = None
            self.atm_cell_tax = None
            self.encapsulation = None
            self.user_defined = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:account'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            leaves = (self.aal, self.atm_cell_tax, self.encapsulation, self.user_defined)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos.Account']['meta_info']

    class Output(object):
        """
        QoS applied in egress direction.

        Leaves: minimum_bandwidth (int, 1..4294967295 kbit/s).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.minimum_bandwidth = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:output'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.minimum_bandwidth is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
            return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos.Output']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:qos'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        children = (self.account, self.output, self.service_policy)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_mod
        return meta_mod._meta_table['DynamicTemplate.Ppps.Ppp.Qos']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this keyed list entry (keyed by template_name)."""
    if self.template_name is None:
        raise YPYModelError('Key property template_name is None')
    return ('/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ppps'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ppp'
            '[Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:template-name = '
            + str(self.template_name) + ']')

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when the key, any leaf, or any child container is set."""
    if not self.is_config():
        return False
    # Plain leaves (key and vrf).
    if self.template_name is not None or self.vrf is not None:
        return True
    # Child containers; each reports its own data presence.
    children = (self.dhcpv6, self.igmp, self.ipv4_network,
                self.ipv4_packet_filter, self.ipv6_neighbor,
                self.ipv6_network, self.ipv6_packet_filter, self.pbr,
                self.qos, self.span_monitor_sessions)
    return any(child is not None and child._has_data() for child in children)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.Ppps.Ppp']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this container (no parent lookup needed)."""
    return ('/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ppps')

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when any ppp list entry holds data."""
    if not self.is_config():
        return False
    if self.ppp is None:
        return False
    return any(entry._has_data() for entry in self.ppp)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.Ppps']['meta_info']
class IpSubscribers(object):
"""
The IP Subscriber Template Table
.. attribute:: ip_subscriber
A IP Subscriber Type Template
**type**\: list of :py:class:`IpSubscriber <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber>`
"""
# YANG module identity for this node.
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'

def __init__(self):
    self.parent = None
    # Keyed YANG list of IP Subscriber templates; YList tracks parent and
    # attribute name for path/meta resolution.
    self.ip_subscriber = YList()
    self.ip_subscriber.parent = self
    self.ip_subscriber.name = 'ip_subscriber'
class IpSubscriber(object):
"""
A IP Subscriber Type Template
.. attribute:: template_name <key>
The name of the template
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: dhcpv6
Interface dhcpv6 configuration data
**type**\: :py:class:`Dhcpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Dhcpv6>`
.. attribute:: ipv4_network
Interface IPv4 Network configuration data
**type**\: :py:class:`Ipv4Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4Network>`
.. attribute:: ipv4_packet_filter
IPv4 Packet Filtering configuration for the template
**type**\: :py:class:`Ipv4PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter>`
.. attribute:: ipv6_neighbor
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor>`
.. attribute:: ipv6_network
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network>`
.. attribute:: ipv6_packet_filter
IPv6 Packet Filtering configuration for the interface
**type**\: :py:class:`Ipv6PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter>`
.. attribute:: pbr
Dynamic Template PBR configuration
**type**\: :py:class:`Pbr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Pbr>`
.. attribute:: qos
QoS dynamically applied configuration template
**type**\: :py:class:`Qos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Qos>`
.. attribute:: span_monitor_sessions
Monitor Session container for this template
**type**\: :py:class:`SpanMonitorSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions>`
.. attribute:: vrf
Assign the interface to a VRF
**type**\: str
**length:** 0..32
"""
# YANG module identity for this node.
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'

def __init__(self):
    self.parent = None
    # Key leaf of this list entry.
    self.template_name = None
    # Child containers; each is parented here so _common_path can be derived.
    self.dhcpv6 = DynamicTemplate.IpSubscribers.IpSubscriber.Dhcpv6()
    self.dhcpv6.parent = self
    self.ipv4_network = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4Network()
    self.ipv4_network.parent = self
    self.ipv4_packet_filter = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter()
    self.ipv4_packet_filter.parent = self
    self.ipv6_neighbor = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor()
    self.ipv6_neighbor.parent = self
    self.ipv6_network = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network()
    self.ipv6_network.parent = self
    self.ipv6_packet_filter = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter()
    self.ipv6_packet_filter.parent = self
    self.pbr = DynamicTemplate.IpSubscribers.IpSubscriber.Pbr()
    self.pbr.parent = self
    self.qos = DynamicTemplate.IpSubscribers.IpSubscriber.Qos()
    self.qos.parent = self
    self.span_monitor_sessions = DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions()
    self.span_monitor_sessions.parent = self
    # Leaf: VRF name (str, length 0..32); None until configured.
    self.vrf = None
class SpanMonitorSessions(object):
"""
Monitor Session container for this template
.. attribute:: span_monitor_session
Configuration for a particular class of Monitor Session
**type**\: list of :py:class:`SpanMonitorSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions.SpanMonitorSession>`
"""
# YANG module identity for this node.
_prefix = 'ethernet-span-subscriber-cfg'
_revision = '2015-11-09'

def __init__(self):
    self.parent = None
    # Keyed YANG list of SPAN monitor sessions.
    self.span_monitor_session = YList()
    self.span_monitor_session.parent = self
    self.span_monitor_session.name = 'span_monitor_session'
class SpanMonitorSession(object):
"""
Configuration for a particular class of Monitor
Session
.. attribute:: session_class <key>
Session Class
**type**\: :py:class:`SpanSessionClassEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_datatypes.SpanSessionClassEnum>`
.. attribute:: acl
Enable ACL matching for traffic mirroring
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: attachment
Attach the interface to a Monitor Session
**type**\: :py:class:`Attachment <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions.SpanMonitorSession.Attachment>`
**presence node**\: True
.. attribute:: mirror_first
Mirror a specified number of bytes from start of packet
**type**\: int
**range:** 1..10000
**units**\: byte
.. attribute:: mirror_interval
Specify the mirror interval
**type**\: :py:class:`SpanMirrorIntervalEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_Ethernet_SPAN_subscriber_cfg.SpanMirrorIntervalEnum>`
"""
# YANG module identity for this node.
_prefix = 'ethernet-span-subscriber-cfg'
_revision = '2015-11-09'

def __init__(self):
    self.parent = None
    self.session_class = None  # key leaf of this list entry
    self.acl = None
    # Presence container; left as None until the caller creates one.
    self.attachment = None
    self.mirror_first = None
    self.mirror_interval = None
class Attachment(object):
    """
    Attach the interface to a Monitor Session.

    This is a YANG presence container: its existence alone carries
    configuration meaning, so ``_has_data`` is always True.

    .. attribute:: direction
        Direction of traffic to replicate (optional).
    .. attribute:: port_level_enable
        Enable port level traffic mirroring.
    .. attribute:: session_name
        Session name (str, length 0..79, mandatory).
    """

    _prefix = 'ethernet-span-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self._is_presence = True  # presence container marker
        self.direction = None
        self.port_level_enable = None
        self.session_name = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:attachment'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True: presence container, or any leaf set."""
        if not self.is_config():
            return False
        if self._is_presence:
            return True
        leaves = (self.direction, self.port_level_enable, self.session_name)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions.SpanMonitorSession.Attachment']['meta_info']
@property
def _common_path(self):
    """XPath of this keyed list entry (keyed by session_class)."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.session_class is None:
        raise YPYModelError('Key property session_class is None')
    return (self.parent._common_path
            + '/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-session'
            '[Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:session-class = '
            + str(self.session_class) + ']')

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when the key, any leaf, or the attachment child is set."""
    if not self.is_config():
        return False
    leaves = (self.session_class, self.acl, self.mirror_first, self.mirror_interval)
    if any(leaf is not None for leaf in leaves):
        return True
    return self.attachment is not None and self.attachment._has_data()

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions.SpanMonitorSession']['meta_info']
@property
def _common_path(self):
    """XPath of this node, derived from the parent container's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-sessions'

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when any span-monitor-session entry holds data."""
    if not self.is_config():
        return False
    if self.span_monitor_session is None:
        return False
    return any(entry._has_data() for entry in self.span_monitor_session)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.SpanMonitorSessions']['meta_info']
class Ipv4PacketFilter(object):
"""
IPv4 Packet Filtering configuration for the
template
.. attribute:: inbound
IPv4 Packet filter to be applied to inbound packets
**type**\: :py:class:`Inbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter.Inbound>`
.. attribute:: outbound
IPv4 Packet filter to be applied to outbound packets
**type**\: :py:class:`Outbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter.Outbound>`
**presence node**\: True
"""
# YANG module identity for this node.
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'

def __init__(self):
    self.parent = None
    # Ordinary child container, always instantiated.
    self.inbound = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter.Inbound()
    self.inbound.parent = self
    # Presence container; left as None until the caller creates one.
    self.outbound = None
class Outbound(object):
    """
    IPv4 Packet filter to be applied to outbound packets.

    This is a YANG presence container: its existence alone carries
    configuration meaning, so ``_has_data`` is always True.

    .. attribute:: common_acl_name
        Not supported (leave unspecified).
    .. attribute:: hardware_count
        Not supported (leave unspecified).
    .. attribute:: interface_statistics
        Not supported (leave unspecified).
    .. attribute:: name
        IPv4 packet filter name applied to outbound packets
        (str, length 0..65, mandatory).
    """

    _prefix = 'ip-pfilter-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self._is_presence = True  # presence container marker
        self.common_acl_name = None
        self.hardware_count = None
        self.interface_statistics = None
        self.name = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True: presence container, or any leaf set."""
        if not self.is_config():
            return False
        if self._is_presence:
            return True
        leaves = (self.common_acl_name, self.hardware_count,
                  self.interface_statistics, self.name)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter.Outbound']['meta_info']
class Inbound(object):
    """
    IPv4 Packet filter to be applied to inbound packets.

    .. attribute:: common_acl_name
        Not supported (leave unspecified).
    .. attribute:: hardware_count
        Not supported (leave unspecified).
    .. attribute:: interface_statistics
        Not supported (leave unspecified).
    .. attribute:: name
        IPv4 packet filter name applied to inbound packets (str, length
        0..65); mandatory if 'CommonACLName' is not specified.
    """

    _prefix = 'ip-pfilter-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self.common_acl_name = None
        self.hardware_count = None
        self.interface_statistics = None
        self.name = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this node is set."""
        if not self.is_config():
            return False
        leaves = (self.common_acl_name, self.hardware_count,
                  self.interface_statistics, self.name)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter.Inbound']['meta_info']
@property
def _common_path(self):
    """XPath of this node, derived from the parent container's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv4-packet-filter'

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when either direction's filter holds data."""
    if not self.is_config():
        return False
    return any(child is not None and child._has_data()
               for child in (self.inbound, self.outbound))

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4PacketFilter']['meta_info']
class Ipv6PacketFilter(object):
"""
IPv6 Packet Filtering configuration for the
interface
.. attribute:: inbound
IPv6 Packet filter to be applied to inbound packets
**type**\: :py:class:`Inbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter.Inbound>`
.. attribute:: outbound
IPv6 Packet filter to be applied to outbound packets
**type**\: :py:class:`Outbound <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter.Outbound>`
**presence node**\: True
"""
# YANG module identity for this node.
_prefix = 'ip-pfilter-subscriber-cfg'
_revision = '2015-11-09'

def __init__(self):
    self.parent = None
    # Ordinary child container, always instantiated.
    self.inbound = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter.Inbound()
    self.inbound.parent = self
    # Presence container; left as None until the caller creates one.
    self.outbound = None
class Inbound(object):
    """
    IPv6 Packet filter to be applied to inbound packets.

    .. attribute:: common_acl_name
        Not supported (leave unspecified).
    .. attribute:: interface_statistics
        Not supported (leave unspecified).
    .. attribute:: name
        IPv6 packet filter name applied to inbound packets (str, length
        0..65); mandatory if 'CommonACLName' is not specified.
    """

    _prefix = 'ip-pfilter-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self.common_acl_name = None
        self.interface_statistics = None
        self.name = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this node is set."""
        if not self.is_config():
            return False
        leaves = (self.common_acl_name, self.interface_statistics, self.name)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter.Inbound']['meta_info']
class Outbound(object):
    """
    IPv6 Packet filter to be applied to outbound packets.

    This is a YANG presence container: its existence alone carries
    configuration meaning, so ``_has_data`` is always True.

    .. attribute:: common_acl_name
        Not supported (leave unspecified).
    .. attribute:: interface_statistics
        Not supported (leave unspecified).
    .. attribute:: name
        IPv6 packet filter name applied to outbound packets
        (str, length 0..65, mandatory).
    """

    _prefix = 'ip-pfilter-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self._is_presence = True  # presence container marker
        self.common_acl_name = None
        self.interface_statistics = None
        self.name = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True: presence container, or any leaf set."""
        if not self.is_config():
            return False
        if self._is_presence:
            return True
        leaves = (self.common_acl_name, self.interface_statistics, self.name)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter.Outbound']['meta_info']
@property
def _common_path(self):
    """XPath of this node, derived from the parent container's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv6-packet-filter'

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when either direction's filter holds data."""
    if not self.is_config():
        return False
    return any(child is not None and child._has_data()
               for child in (self.inbound, self.outbound))

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6PacketFilter']['meta_info']
class Ipv4Network(object):
    """
    Interface IPv4 Network configuration data.

    .. attribute:: mtu
        IP maximum transmission unit in bytes (int, range 68..65535).
    .. attribute:: rpf
        True if enabled, False if disabled (default true per model).
    .. attribute:: unnumbered
        Enable IP processing without an explicit address (str).
    .. attribute:: unreachables
        True if enabled, False if disabled (default false per model).
    """

    _prefix = 'ipv4-ma-subscriber-cfg'  # YANG module prefix
    _revision = '2015-07-30'  # YANG module revision

    def __init__(self):
        self.parent = None
        self.mtu = None
        self.rpf = None
        self.unnumbered = None
        self.unreachables = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-ma-subscriber-cfg:ipv4-network'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this node is set."""
        if not self.is_config():
            return False
        leaves = (self.mtu, self.rpf, self.unnumbered, self.unreachables)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv4Network']['meta_info']
class Ipv6Network(object):
"""
Interface IPv6 Network configuration data
.. attribute:: addresses
Set the IPv6 address of an interface
**type**\: :py:class:`Addresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses>`
.. attribute:: mtu
MTU Setting of Interface
**type**\: int
**range:** 1280..65535
**units**\: byte
.. attribute:: unreachables
Override Sending of ICMP Unreachable Messages
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: verify
IPv6 Verify Unicast Souce Reachable
**type**\: :py:class:`Ipv6ReachableViaEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv6_ma_subscriber_cfg.Ipv6ReachableViaEnum>`
"""
# YANG module identity for this node.
_prefix = 'ipv6-ma-subscriber-cfg'
_revision = '2015-07-30'

def __init__(self):
    self.parent = None
    # Ordinary child container, always instantiated.
    self.addresses = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses()
    self.addresses.parent = self
    # Leaves; None means "not configured".
    self.mtu = None
    self.unreachables = None
    self.verify = None
class Addresses(object):
"""
Set the IPv6 address of an interface
.. attribute:: auto_configuration
Auto IPv6 Interface Configuration
**type**\: :py:class:`AutoConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses.AutoConfiguration>`
"""
# YANG module identity for this node.
_prefix = 'ipv6-ma-subscriber-cfg'
_revision = '2015-07-30'

def __init__(self):
    self.parent = None
    # Ordinary child container, always instantiated.
    self.auto_configuration = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses.AutoConfiguration()
    self.auto_configuration.parent = self
class AutoConfiguration(object):
    """
    Auto IPv6 Interface Configuration.

    .. attribute:: enable
        Flag to enable auto ipv6 interface configuration (Empty leaf).
    """

    _prefix = 'ipv6-ma-subscriber-cfg'  # YANG module prefix
    _revision = '2015-07-30'  # YANG module revision

    def __init__(self):
        self.parent = None
        self.enable = None  # None until configured

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:auto-configuration'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True when the enable leaf is set."""
        if not self.is_config():
            return False
        return self.enable is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses.AutoConfiguration']['meta_info']
@property
def _common_path(self):
    """XPath of this node, derived from the parent container's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:addresses'

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when the auto-configuration child holds data."""
    if not self.is_config():
        return False
    child = self.auto_configuration
    return child is not None and child._has_data()

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network.Addresses']['meta_info']
@property
def _common_path(self):
    """XPath of this node, derived from the parent container's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:ipv6-network'

def is_config(self):
    """Return True: this node represents configuration data."""
    return True

def _has_data(self):
    """Return True when the addresses child or any leaf is set."""
    if not self.is_config():
        return False
    if self.addresses is not None and self.addresses._has_data():
        return True
    return any(leaf is not None
               for leaf in (self.mtu, self.unreachables, self.verify))

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Network']['meta_info']
class Ipv6Neighbor(object):
"""
Interface IPv6 Network configuration data
.. attribute:: duplicate_address_detection
Duplicate Address Detection (DAD)
**type**\: :py:class:`DuplicateAddressDetection <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.DuplicateAddressDetection>`
.. attribute:: framed_prefix
Set the IPv6 framed ipv6 prefix for a subscriber interface
**type**\: :py:class:`FramedPrefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.FramedPrefix>`
**presence node**\: True
.. attribute:: framed_prefix_pool
Set the IPv6 framed ipv6 prefix pool for a subscriber interface
**type**\: str
.. attribute:: managed_config
Host to use stateful protocol for address configuration
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ns_interval
Set advertised NS retransmission interval in milliseconds
**type**\: int
**range:** 1000..3600000
**units**\: millisecond
.. attribute:: nud_enable
NUD enable
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: other_config
Host to use stateful protocol for non\-address configuration
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ra_hop_limit
IPv6 ND RA HopLimit
**type**\: :py:class:`Ipv6NdHopLimitEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv6_nd_subscriber_cfg.Ipv6NdHopLimitEnum>`
.. attribute:: ra_initial
IPv6 ND RA Initial
**type**\: :py:class:`RaInitial <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.RaInitial>`
**presence node**\: True
.. attribute:: ra_interval
Set IPv6 Router Advertisement (RA) interval in seconds
**type**\: int
**range:** 3..1800
**units**\: second
.. attribute:: ra_lifetime
Set IPv6 Router Advertisement (RA) lifetime in seconds
**type**\: int
**range:** 0..9000
**units**\: second
.. attribute:: ra_suppress
Enable suppress IPv6 router advertisement
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ra_suppress_mtu
RA suppress MTU flag
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ra_unicast
Enable RA unicast Flag
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: reachable_time
Set advertised reachability time in milliseconds
**type**\: int
**range:** 0..3600000
**units**\: millisecond
.. attribute:: router_preference
RA Router Preference
**type**\: :py:class:`Ipv6NdRouterPrefTemplateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv6_nd_subscriber_cfg.Ipv6NdRouterPrefTemplateEnum>`
.. attribute:: start_ra_on_ipv6_enable
Start RA on ipv6\-enable config
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: suppress_cache_learning
Suppress cache learning flag
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
# YANG module identity for this node.
_prefix = 'ipv6-nd-subscriber-cfg'
_revision = '2015-11-09'

def __init__(self):
    self.parent = None
    # Ordinary child container, always instantiated.
    self.duplicate_address_detection = DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.DuplicateAddressDetection()
    self.duplicate_address_detection.parent = self
    # Presence containers; left as None until the caller creates them.
    self.framed_prefix = None
    # Leaves; None means "not configured".
    self.framed_prefix_pool = None
    self.managed_config = None
    self.ns_interval = None
    self.nud_enable = None
    self.other_config = None
    self.ra_hop_limit = None
    self.ra_initial = None
    self.ra_interval = None
    self.ra_lifetime = None
    self.ra_suppress = None
    self.ra_suppress_mtu = None
    self.ra_unicast = None
    self.reachable_time = None
    self.router_preference = None
    self.start_ra_on_ipv6_enable = None
    self.suppress_cache_learning = None
class FramedPrefix(object):
    """
    Set the IPv6 framed ipv6 prefix for a subscriber interface.

    This is a YANG presence container: its existence alone carries
    configuration meaning, so ``_has_data`` is always True.

    .. attribute:: prefix
        IPv6 framed prefix address (str, mandatory).
    .. attribute:: prefix_length
        IPv6 framed prefix length (int, range 0..128, mandatory).
    """

    _prefix = 'ipv6-nd-subscriber-cfg'  # YANG module prefix
    _revision = '2015-11-09'  # YANG module revision

    def __init__(self):
        self.parent = None
        self._is_presence = True  # presence container marker
        self.prefix = None
        self.prefix_length = None

    @property
    def _common_path(self):
        """XPath of this node, derived from the parent container's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:framed-prefix'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        """Return True: presence container, or any leaf set."""
        if not self.is_config():
            return False
        if self._is_presence:
            return True
        return any(leaf is not None for leaf in (self.prefix, self.prefix_length))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.FramedPrefix']['meta_info']
class DuplicateAddressDetection(object):
    """
    Duplicate Address Detection (DAD) configuration.

    Attributes:
        attempts (int): number of IPv6 duplicate address detection
            transmits, range 0..600.
    """

    _prefix = 'ipv6-nd-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.attempts = None

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:duplicate-address-detection' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if the single leaf has been set."""
        if not self.is_config():
            return False
        return self.attempts is not None

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.DuplicateAddressDetection']['meta_info']
class RaInitial(object):
    """
    IPv6 ND RA Initial configuration.

    This is a presence container: merely instantiating it counts as data.

    Attributes:
        count (int): initial RA count, range 0..32 (mandatory).
        interval (int): initial RA interval in seconds, range 4..1800
            (mandatory).
    """

    _prefix = 'ipv6-nd-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self._is_presence = True  # presence container: existence itself is data
        self.count = None
        self.interval = None

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ra-initial' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if this node carries data (always, once present)."""
        if not self.is_config():
            return False
        if self._is_presence:
            return True
        return any(v is not None for v in (self.count, self.interval))

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor.RaInitial']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this node, derived from the parent's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return '%s/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ipv6-neighbor' % self.parent._common_path
def is_config(self):
    """Return True: this class models configuration (not operational) data."""
    return True
def _has_data(self):
    """Return True if any child container or leaf of ipv6-neighbor is set."""
    if not self.is_config():
        return False
    # Child containers count only when they themselves carry data.
    containers = (self.duplicate_address_detection,
                  self.framed_prefix,
                  self.ra_initial)
    if any(c is not None and c._has_data() for c in containers):
        return True
    # Leaves count as soon as they are set.
    leaves = (self.framed_prefix_pool, self.managed_config, self.ns_interval,
              self.nud_enable, self.other_config, self.ra_hop_limit,
              self.ra_interval, self.ra_lifetime, self.ra_suppress,
              self.ra_suppress_mtu, self.ra_unicast, self.reachable_time,
              self.router_preference, self.start_ra_on_ipv6_enable,
              self.suppress_cache_learning)
    return any(leaf is not None for leaf in leaves)
@staticmethod
def _meta_info():
    """Return the YDK meta information registered for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
    return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Ipv6Neighbor']['meta_info']
class Dhcpv6(object):
    """
    Interface dhcpv6 configuration data.

    Attributes:
        address_pool (str): pool used for address assignment.
        class_ (str): class used for the proxy/server profile.
        delegated_prefix: prefix used for prefix delegation
            (presence node, optional).
        delegated_prefix_pool (str): pool used for prefix delegation.
        dns_ipv6address (str): DNS IPv6 address.
        stateful_address (str): stateful IPv6 address.
    """

    _prefix = 'ipv6-new-dhcpv6d-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.address_pool = None
        self.class_ = None
        self.delegated_prefix = None
        self.delegated_prefix_pool = None
        self.dns_ipv6address = None
        self.stateful_address = None

    class DelegatedPrefix(object):
        """
        The prefix to be used for Prefix Delegation.

        This is a presence container: merely instantiating it counts as data.

        Attributes:
            prefix (str): IPv6 prefix (mandatory).
            prefix_length (int): PD prefix length, range 0..128 (mandatory).
        """

        _prefix = 'ipv6-new-dhcpv6d-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True  # presence container
            self.prefix = None
            self.prefix_length = None

        @property
        def _common_path(self):
            """Absolute XPath of this node, derived from the parent's path."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg:delegated-prefix' % self.parent._common_path

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if this node carries data (always, once present)."""
            if not self.is_config():
                return False
            if self._is_presence:
                return True
            return any(v is not None for v in (self.prefix, self.prefix_length))

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Dhcpv6.DelegatedPrefix']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-ipv6-new-dhcpv6d-subscriber-cfg:dhcpv6' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if any child container or leaf of dhcpv6 is set."""
        if not self.is_config():
            return False
        if self.delegated_prefix is not None and self.delegated_prefix._has_data():
            return True
        return any(v is not None for v in (
            self.address_pool, self.class_, self.delegated_prefix_pool,
            self.dns_ipv6address, self.stateful_address))

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Dhcpv6']['meta_info']
class Pbr(object):
    """
    Dynamic Template PBR configuration.

    Attributes:
        service_policy: PBR service policy configuration (child container).
        service_policy_in (str): class for subscriber ingress policy.
    """

    _prefix = 'pbr-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        policy = DynamicTemplate.IpSubscribers.IpSubscriber.Pbr.ServicePolicy()
        policy.parent = self
        self.service_policy = policy
        self.service_policy_in = None

    class ServicePolicy(object):
        """
        PBR service policy configuration.

        Attributes:
            input (str): ingress service policy.
        """

        _prefix = 'pbr-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.input = None

        @property
        def _common_path(self):
            """Absolute XPath of this node, derived from the parent's path."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-pbr-subscriber-cfg:service-policy' % self.parent._common_path

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if the single leaf has been set."""
            if not self.is_config():
                return False
            return self.input is not None

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Pbr.ServicePolicy']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-pbr-subscriber-cfg:pbr' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if the child container or a leaf of pbr is set."""
        if not self.is_config():
            return False
        if self.service_policy is not None and self.service_policy._has_data():
            return True
        return self.service_policy_in is not None

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Pbr']['meta_info']
class Qos(object):
    """
    QoS dynamically applied configuration template.

    Attributes:
        account: QoS L2 overhead accounting (child container).
        output: QoS applied in the egress direction (child container).
        service_policy: service policy applied in the ingress/egress
            direction (child container).
    """

    _prefix = 'qos-ma-bng-cfg'
    _revision = '2016-04-01'

    def __init__(self):
        self.parent = None
        qos_cls = DynamicTemplate.IpSubscribers.IpSubscriber.Qos
        self.account = qos_cls.Account()
        self.account.parent = self
        self.output = qos_cls.Output()
        self.output.parent = self
        self.service_policy = qos_cls.ServicePolicy()
        self.service_policy.parent = self

    class ServicePolicy(object):
        """
        Service policy to be applied in the ingress/egress direction.

        Attributes:
            input: subscriber ingress policy (presence node, optional).
            output: subscriber egress policy (presence node, optional).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.input = None
            self.output = None

        class Input(object):
            """
            Subscriber ingress policy.

            This is a presence container: merely instantiating it counts
            as data.

            Attributes:
                account_stats (bool): TRUE when account stats are enabled for
                    the applied service-policy; not supported for subscriber
                    types 'ppp' and 'ipsubscriber'.
                merge (bool): TRUE when merge is enabled for the applied
                    service-policy.
                merge_id (int): merge ID value, range 0..255.
                policy_name (str): name of the policy-map (mandatory).
                spi_name (str): name of the SPI.
            """

            _prefix = 'qos-ma-bng-cfg'
            _revision = '2016-04-01'

            def __init__(self):
                self.parent = None
                self._is_presence = True  # presence container
                self.account_stats = None
                self.merge = None
                self.merge_id = None
                self.policy_name = None
                self.spi_name = None

            @property
            def _common_path(self):
                """Absolute XPath of this node, derived from the parent."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:input' % self.parent._common_path

            def is_config(self):
                """Return True: this class models configuration data."""
                return True

            def _has_data(self):
                """Return True if this node carries data (always, once present)."""
                if not self.is_config():
                    return False
                if self._is_presence:
                    return True
                return any(v is not None for v in (
                    self.account_stats, self.merge, self.merge_id,
                    self.policy_name, self.spi_name))

            @staticmethod
            def _meta_info():
                """Return the YDK meta information registered for this class."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
                return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos.ServicePolicy.Input']['meta_info']

        class Output(object):
            """
            Subscriber egress policy.

            This is a presence container: merely instantiating it counts
            as data.

            Attributes:
                account_stats (bool): TRUE when account stats are enabled for
                    the applied service-policy; not supported for subscriber
                    types 'ppp' and 'ipsubscriber'.
                merge (bool): TRUE when merge is enabled for the applied
                    service-policy.
                merge_id (int): merge ID value, range 0..255.
                policy_name (str): name of the policy-map (mandatory).
                spi_name (str): name of the SPI.
            """

            _prefix = 'qos-ma-bng-cfg'
            _revision = '2016-04-01'

            def __init__(self):
                self.parent = None
                self._is_presence = True  # presence container
                self.account_stats = None
                self.merge = None
                self.merge_id = None
                self.policy_name = None
                self.spi_name = None

            @property
            def _common_path(self):
                """Absolute XPath of this node, derived from the parent."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:output' % self.parent._common_path

            def is_config(self):
                """Return True: this class models configuration data."""
                return True

            def _has_data(self):
                """Return True if this node carries data (always, once present)."""
                if not self.is_config():
                    return False
                if self._is_presence:
                    return True
                return any(v is not None for v in (
                    self.account_stats, self.merge, self.merge_id,
                    self.policy_name, self.spi_name))

            @staticmethod
            def _meta_info():
                """Return the YDK meta information registered for this class."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
                return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos.ServicePolicy.Output']['meta_info']

        @property
        def _common_path(self):
            """Absolute XPath of this node, derived from the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:service-policy' % self.parent._common_path

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if either presence child carries data."""
            if not self.is_config():
                return False
            children = (self.input, self.output)
            return any(c is not None and c._has_data() for c in children)

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos.ServicePolicy']['meta_info']

    class Account(object):
        """
        QoS L2 overhead accounting.

        Attributes:
            aal: ATM adaptation layer AAL.
            atm_cell_tax: ATM cell tax to L2 overhead.
            encapsulation: encapsulation type.
            user_defined (int): numeric L2 overhead offset, range -63..63.
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.aal = None
            self.atm_cell_tax = None
            self.encapsulation = None
            self.user_defined = None

        @property
        def _common_path(self):
            """Absolute XPath of this node, derived from the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:account' % self.parent._common_path

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if any leaf has been set."""
            if not self.is_config():
                return False
            return any(v is not None for v in (
                self.aal, self.atm_cell_tax, self.encapsulation,
                self.user_defined))

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos.Account']['meta_info']

    class Output(object):
        """
        QoS to be applied in the egress direction.

        Attributes:
            minimum_bandwidth (int): minimum bandwidth for the subscriber
                in kbit/s, range 1..4294967295.
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self.minimum_bandwidth = None

        @property
        def _common_path(self):
            """Absolute XPath of this node, derived from the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:output' % self.parent._common_path

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if the single leaf has been set."""
            if not self.is_config():
                return False
            return self.minimum_bandwidth is not None

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos.Output']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-qos-ma-bng-cfg:qos' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if any child container of qos carries data."""
        if not self.is_config():
            return False
        children = (self.account, self.output, self.service_policy)
        return any(c is not None and c._has_data() for c in children)

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber.Qos']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this list entry, keyed by template_name."""
    if self.template_name is None:
        raise YPYModelError('Key property template_name is None')
    base = ('/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ip-subscribers'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ip-subscriber')
    return '%s[Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:template-name = %s]' % (base, str(self.template_name))
def is_config(self):
    """Return True: this class models configuration (not operational) data."""
    return True
def _has_data(self):
    """Return True if the key, any leaf, or any child container is set."""
    if not self.is_config():
        return False
    # Leaves (the list key and vrf) count as soon as they are set.
    if self.template_name is not None or self.vrf is not None:
        return True
    # Child containers count only when they themselves carry data.
    children = (self.dhcpv6, self.ipv4_network, self.ipv4_packet_filter,
                self.ipv6_neighbor, self.ipv6_network,
                self.ipv6_packet_filter, self.pbr, self.qos,
                self.span_monitor_sessions)
    return any(c is not None and c._has_data() for c in children)
@staticmethod
def _meta_info():
    """Return the YDK meta information registered for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
    return meta_module._meta_table['DynamicTemplate.IpSubscribers.IpSubscriber']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the top-level ip-subscribers container."""
    return ('/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:ip-subscribers')
def is_config(self):
    """Return True: this class models configuration (not operational) data."""
    return True
def _has_data(self):
    """Return True if any ip-subscriber list entry carries data."""
    if not self.is_config():
        return False
    entries = self.ip_subscriber
    if entries is None:
        return False
    return any(entry._has_data() for entry in entries)
@staticmethod
def _meta_info():
    """Return the YDK meta information registered for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
    return meta_module._meta_table['DynamicTemplate.IpSubscribers']['meta_info']
class SubscriberServices(object):
"""
The Service Type Template Table
.. attribute:: subscriber_service
A Service Type Template
**type**\: list of :py:class:`SubscriberService <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService>`
"""
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'
def __init__(self):
    """Initialize the keyed list of SubscriberService entries."""
    self.parent = None
    services = YList()
    services.parent = self
    services.name = 'subscriber_service'
    self.subscriber_service = services
class SubscriberService(object):
"""
A Service Type Template
.. attribute:: template_name <key>
The name of the template
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: ipv4_network
Interface IPv4 Network configuration data
**type**\: :py:class:`Ipv4Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Ipv4Network>`
.. attribute:: ipv4_packet_filter
IPv4 Packet Filtering configuration for the template
**type**\: :py:class:`Ipv4PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Ipv4PacketFilter>`
.. attribute:: ipv6_neighbor
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Neighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor>`
.. attribute:: ipv6_network
Interface IPv6 Network configuration data
**type**\: :py:class:`Ipv6Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network>`
.. attribute:: ipv6_packet_filter
IPv6 Packet Filtering configuration for the interface
**type**\: :py:class:`Ipv6PacketFilter <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Ipv6PacketFilter>`
.. attribute:: pbr
Dynamic Template PBR configuration
**type**\: :py:class:`Pbr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Pbr>`
.. attribute:: qos
QoS dynamically applied configuration template
**type**\: :py:class:`Qos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Qos>`
.. attribute:: span_monitor_sessions
Monitor Session container for this template
**type**\: :py:class:`SpanMonitorSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.SpanMonitorSessions>`
.. attribute:: vrf
Assign the interface to a VRF
**type**\: str
**length:** 0..32
"""
_prefix = 'subscriber-infra-tmplmgr-cfg'
_revision = '2015-01-07'
def __init__(self):
    """Initialize the key leaf, all child containers, and the vrf leaf."""
    self.parent = None
    self.template_name = None
    # Build each child container and wire its back-reference.
    svc = DynamicTemplate.SubscriberServices.SubscriberService
    for attr, child_cls in (
        ('ipv4_network', svc.Ipv4Network),
        ('ipv4_packet_filter', svc.Ipv4PacketFilter),
        ('ipv6_neighbor', svc.Ipv6Neighbor),
        ('ipv6_network', svc.Ipv6Network),
        ('ipv6_packet_filter', svc.Ipv6PacketFilter),
        ('pbr', svc.Pbr),
        ('qos', svc.Qos),
        ('span_monitor_sessions', svc.SpanMonitorSessions),
    ):
        child = child_cls()
        child.parent = self
        setattr(self, attr, child)
    self.vrf = None
class SpanMonitorSessions(object):
    """
    Monitor Session container for this template.

    Attributes:
        span_monitor_session: list of per-class Monitor Session
            configurations.
    """

    _prefix = 'ethernet-span-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        sessions = YList()
        sessions.parent = self
        sessions.name = 'span_monitor_session'
        self.span_monitor_session = sessions

    class SpanMonitorSession(object):
        """
        Configuration for a particular class of Monitor Session.

        Attributes:
            session_class: Session Class (list key).
            acl: enable ACL matching for traffic mirroring.
            attachment: attach the interface to a Monitor Session
                (presence node, optional).
            mirror_first (int): bytes mirrored from the start of each
                packet, range 1..10000.
            mirror_interval: the mirror interval.
        """

        _prefix = 'ethernet-span-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.session_class = None
            self.acl = None
            self.attachment = None
            self.mirror_first = None
            self.mirror_interval = None

        class Attachment(object):
            """
            Attach the interface to a Monitor Session.

            This is a presence container: merely instantiating it counts
            as data.

            Attributes:
                direction: direction of traffic to replicate (optional).
                port_level_enable: enable port level traffic mirroring.
                session_name (str): Session Name, length 0..79 (mandatory).
            """

            _prefix = 'ethernet-span-subscriber-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self._is_presence = True  # presence container
                self.direction = None
                self.port_level_enable = None
                self.session_name = None

            @property
            def _common_path(self):
                """Absolute XPath of this node, derived from the parent."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return '%s/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:attachment' % self.parent._common_path

            def is_config(self):
                """Return True: this class models configuration data."""
                return True

            def _has_data(self):
                """Return True if this node carries data (always, once present)."""
                if not self.is_config():
                    return False
                if self._is_presence:
                    return True
                return any(v is not None for v in (
                    self.direction, self.port_level_enable,
                    self.session_name))

            @staticmethod
            def _meta_info():
                """Return the YDK meta information registered for this class."""
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
                return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.SpanMonitorSessions.SpanMonitorSession.Attachment']['meta_info']

        @property
        def _common_path(self):
            """Absolute XPath of this list entry, keyed by session_class."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.session_class is None:
                raise YPYModelError('Key property session_class is None')
            return '%s/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-session[Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:session-class = %s]' % (self.parent._common_path, str(self.session_class))

        def is_config(self):
            """Return True: this class models configuration data."""
            return True

        def _has_data(self):
            """Return True if the key, any leaf, or the attachment is set."""
            if not self.is_config():
                return False
            if self.attachment is not None and self.attachment._has_data():
                return True
            return any(v is not None for v in (
                self.session_class, self.acl, self.mirror_first,
                self.mirror_interval))

        @staticmethod
        def _meta_info():
            """Return the YDK meta information registered for this class."""
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.SpanMonitorSessions.SpanMonitorSession']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this node, derived from the parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return '%s/Cisco-IOS-XR-Ethernet-SPAN-subscriber-cfg:span-monitor-sessions' % self.parent._common_path

    def is_config(self):
        """Return True: this class models configuration data."""
        return True

    def _has_data(self):
        """Return True if any span-monitor-session entry carries data."""
        if not self.is_config():
            return False
        entries = self.span_monitor_session
        if entries is None:
            return False
        return any(entry._has_data() for entry in entries)

    @staticmethod
    def _meta_info():
        """Return the YDK meta information registered for this class."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.SpanMonitorSessions']['meta_info']
class Ipv4PacketFilter(object):
    """IPv4 packet filtering configuration for the template.

    Children:
        inbound  -- filter applied to inbound packets (eagerly created and
                    wired back to this node).
        outbound -- filter applied to outbound packets; a presence node,
                    ``None`` until explicitly created by the caller.
    """

    _prefix = 'ip-pfilter-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.inbound = DynamicTemplate.SubscriberServices.SubscriberService.Ipv4PacketFilter.Inbound()
        self.inbound.parent = self
        self.outbound = None

    class Outbound(object):
        """IPv4 packet filter applied to outbound packets (presence node).

        Leaves (``None`` means unset):
            common_acl_name      -- not supported, leave unspecified (str).
            hardware_count       -- not supported, leave unspecified (Empty).
            interface_statistics -- not supported, leave unspecified (Empty).
            name                 -- filter name, length 0..65, mandatory (str).
        """

        _prefix = 'ip-pfilter-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.common_acl_name = None
            self.hardware_count = None
            self.interface_statistics = None
            self.name = None

        @property
        def _common_path(self):
            # The path can only be derived through the parent chain.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            leaves = (self.common_acl_name, self.hardware_count,
                      self.interface_statistics, self.name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv4PacketFilter.Outbound']['meta_info']

    class Inbound(object):
        """IPv4 packet filter applied to inbound packets.

        Leaves (``None`` means unset):
            common_acl_name      -- not supported, leave unspecified (str).
            hardware_count       -- not supported, leave unspecified (Empty).
            interface_statistics -- not supported, leave unspecified (Empty).
            name                 -- filter name, length 0..65; mandatory when
                                    'CommonACLName' is not specified (str).
        """

        _prefix = 'ip-pfilter-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.common_acl_name = None
            self.hardware_count = None
            self.interface_statistics = None
            self.name = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            leaves = (self.common_acl_name, self.hardware_count,
                      self.interface_statistics, self.name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv4PacketFilter.Inbound']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv4-packet-filter'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Data is present when either direction's filter holds data.
        children = (self.inbound, self.outbound)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv4PacketFilter']['meta_info']
class Ipv6PacketFilter(object):
    """IPv6 packet filtering configuration for the interface.

    Children:
        inbound  -- filter applied to inbound packets (eagerly created and
                    wired back to this node).
        outbound -- filter applied to outbound packets; a presence node,
                    ``None`` until explicitly created by the caller.
    """

    _prefix = 'ip-pfilter-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.inbound = DynamicTemplate.SubscriberServices.SubscriberService.Ipv6PacketFilter.Inbound()
        self.inbound.parent = self
        self.outbound = None

    class Inbound(object):
        """IPv6 packet filter applied to inbound packets.

        Leaves (``None`` means unset):
            common_acl_name      -- not supported, leave unspecified (str).
            interface_statistics -- not supported, leave unspecified (Empty).
            name                 -- filter name, length 0..65; mandatory when
                                    'CommonACLName' is not specified (str).
        """

        _prefix = 'ip-pfilter-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.common_acl_name = None
            self.interface_statistics = None
            self.name = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:inbound'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            leaves = (self.common_acl_name, self.interface_statistics, self.name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6PacketFilter.Inbound']['meta_info']

    class Outbound(object):
        """IPv6 packet filter applied to outbound packets (presence node).

        Leaves (``None`` means unset):
            common_acl_name      -- not supported, leave unspecified (str).
            interface_statistics -- not supported, leave unspecified (Empty).
            name                 -- filter name, length 0..65, mandatory (str).
        """

        _prefix = 'ip-pfilter-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.common_acl_name = None
            self.interface_statistics = None
            self.name = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:outbound'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            leaves = (self.common_acl_name, self.interface_statistics, self.name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6PacketFilter.Outbound']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ip-pfilter-subscriber-cfg:ipv6-packet-filter'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        children = (self.inbound, self.outbound)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6PacketFilter']['meta_info']
class Ipv4Network(object):
    """Interface IPv4 network configuration data.

    Leaves (``None`` means unset):
        mtu          -- IP maximum transmission unit (int, 68..65535, bytes).
        rpf          -- TRUE if enabled, FALSE if disabled (bool, default true).
        unnumbered   -- enable IP processing without an explicit address (str).
        unreachables -- TRUE if enabled, FALSE if disabled (bool, default false).
    """

    _prefix = 'ipv4-ma-subscriber-cfg'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        self.mtu = None
        self.rpf = None
        self.unnumbered = None
        self.unreachables = None

    @property
    def _common_path(self):
        # The path can only be derived through the parent chain.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv4-ma-subscriber-cfg:ipv4-network'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        leaves = (self.mtu, self.rpf, self.unnumbered, self.unreachables)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv4Network']['meta_info']
class Ipv6Network(object):
    """Interface IPv6 network configuration data.

    Children/leaves (``None`` means unset):
        addresses    -- IPv6 address configuration (eagerly created child).
        mtu          -- MTU of the interface (int, 1280..65535, bytes).
        unreachables -- override sending of ICMP unreachable messages (Empty).
        verify       -- IPv6 verify unicast source reachable (enum).
    """

    _prefix = 'ipv6-ma-subscriber-cfg'
    _revision = '2015-07-30'

    def __init__(self):
        self.parent = None
        self.addresses = DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network.Addresses()
        self.addresses.parent = self
        self.mtu = None
        self.unreachables = None
        self.verify = None

    class Addresses(object):
        """IPv6 address configuration of an interface.

        Child:
            auto_configuration -- auto IPv6 interface configuration
                                  (eagerly created).
        """

        _prefix = 'ipv6-ma-subscriber-cfg'
        _revision = '2015-07-30'

        def __init__(self):
            self.parent = None
            self.auto_configuration = DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network.Addresses.AutoConfiguration()
            self.auto_configuration.parent = self

        class AutoConfiguration(object):
            """Auto IPv6 interface configuration.

            Leaf:
                enable -- flag enabling auto IPv6 interface configuration
                          (Empty; ``None`` means unset).
            """

            _prefix = 'ipv6-ma-subscriber-cfg'
            _revision = '2015-07-30'

            def __init__(self):
                self.parent = None
                self.enable = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:auto-configuration'

            def is_config(self):
                """Return True: this node represents configuration data."""
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                return self.enable is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
                return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network.Addresses.AutoConfiguration']['meta_info']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:addresses'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            child = self.auto_configuration
            return child is not None and child._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network.Addresses']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv6-ma-subscriber-cfg:ipv6-network'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.addresses is not None and self.addresses._has_data():
            return True
        leaves = (self.mtu, self.unreachables, self.verify)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Network']['meta_info']
class Ipv6Neighbor(object):
    """Interface IPv6 neighbor-discovery configuration data.

    Children:
        duplicate_address_detection -- DAD container (eagerly created).
        framed_prefix               -- framed IPv6 prefix (presence node).
        ra_initial                  -- initial RA count/interval (presence node).

    Leaves (``None`` means unset):
        framed_prefix_pool, managed_config, ns_interval, nud_enable,
        other_config, ra_hop_limit, ra_interval, ra_lifetime, ra_suppress,
        ra_suppress_mtu, ra_unicast, reachable_time, router_preference,
        start_ra_on_ipv6_enable, suppress_cache_learning.
    """

    _prefix = 'ipv6-nd-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.duplicate_address_detection = DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor.DuplicateAddressDetection()
        self.duplicate_address_detection.parent = self
        self.framed_prefix = None
        self.framed_prefix_pool = None
        self.managed_config = None
        self.ns_interval = None
        self.nud_enable = None
        self.other_config = None
        self.ra_hop_limit = None
        self.ra_initial = None
        self.ra_interval = None
        self.ra_lifetime = None
        self.ra_suppress = None
        self.ra_suppress_mtu = None
        self.ra_unicast = None
        self.reachable_time = None
        self.router_preference = None
        self.start_ra_on_ipv6_enable = None
        self.suppress_cache_learning = None

    class FramedPrefix(object):
        """Framed IPv6 prefix for a subscriber interface (presence node).

        Leaves:
            prefix        -- IPv6 framed prefix address (str, mandatory).
            prefix_length -- IPv6 framed prefix length (int, 0..128, mandatory).
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.prefix = None
            self.prefix_length = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:framed-prefix'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            return any(leaf is not None for leaf in (self.prefix, self.prefix_length))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor.FramedPrefix']['meta_info']

    class DuplicateAddressDetection(object):
        """Duplicate Address Detection (DAD) configuration.

        Leaf:
            attempts -- number of DAD transmits (int, 0..600; ``None`` unset).
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.attempts = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:duplicate-address-detection'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.attempts is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor.DuplicateAddressDetection']['meta_info']

    class RaInitial(object):
        """IPv6 ND initial Router Advertisement settings (presence node).

        Leaves:
            count    -- initial RA count (int, 0..32, mandatory).
            interval -- initial RA interval (int, 4..1800 seconds, mandatory).
        """

        _prefix = 'ipv6-nd-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.count = None
            self.interval = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ra-initial'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            return any(leaf is not None for leaf in (self.count, self.interval))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor.RaInitial']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-ipv6-nd-subscriber-cfg:ipv6-neighbor'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        # Container children count when present *and* carrying data.
        containers = (self.duplicate_address_detection, self.framed_prefix,
                      self.ra_initial)
        if any(child is not None and child._has_data() for child in containers):
            return True
        leaves = (self.framed_prefix_pool, self.managed_config,
                  self.ns_interval, self.nud_enable, self.other_config,
                  self.ra_hop_limit, self.ra_interval, self.ra_lifetime,
                  self.ra_suppress, self.ra_suppress_mtu, self.ra_unicast,
                  self.reachable_time, self.router_preference,
                  self.start_ra_on_ipv6_enable, self.suppress_cache_learning)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Ipv6Neighbor']['meta_info']
class Pbr(object):
    """Dynamic template PBR (policy-based routing) configuration.

    Children/leaves:
        service_policy    -- PBR service-policy container (eagerly created).
        service_policy_in -- class for subscriber ingress policy
                             (str; ``None`` means unset).
    """

    _prefix = 'pbr-subscriber-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.service_policy = DynamicTemplate.SubscriberServices.SubscriberService.Pbr.ServicePolicy()
        self.service_policy.parent = self
        self.service_policy_in = None

    class ServicePolicy(object):
        """PBR service-policy configuration.

        Leaf:
            input -- ingress service policy (str; ``None`` means unset).
        """

        _prefix = 'pbr-subscriber-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.input = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-pbr-subscriber-cfg:service-policy'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.input is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Pbr.ServicePolicy']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-pbr-subscriber-cfg:pbr'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.service_policy is not None and self.service_policy._has_data():
            return True
        return self.service_policy_in is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Pbr']['meta_info']
class Qos(object):
"""
QoS dynamically applied configuration template
.. attribute:: account
QoS L2 overhead accounting
**type**\: :py:class:`Account <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Qos.Account>`
.. attribute:: output
QoS to be applied in egress direction
**type**\: :py:class:`Output <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Qos.Output>`
.. attribute:: service_policy
Service policy to be applied in ingress/egress direction
**type**\: :py:class:`ServicePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg.DynamicTemplate.SubscriberServices.SubscriberService.Qos.ServicePolicy>`
"""
_prefix = 'qos-ma-bng-cfg'
_revision = '2016-04-01'
def __init__(self):
    # Root of the QoS template node: every child is a plain (non-presence)
    # container, so each one is instantiated eagerly and wired back to this
    # node through its ``parent`` attribute.
    self.parent = None
    self.account = DynamicTemplate.SubscriberServices.SubscriberService.Qos.Account()
    self.account.parent = self
    self.output = DynamicTemplate.SubscriberServices.SubscriberService.Qos.Output()
    self.output.parent = self
    self.service_policy = DynamicTemplate.SubscriberServices.SubscriberService.Qos.ServicePolicy()
    self.service_policy.parent = self
class ServicePolicy(object):
    """Service policy applied in the ingress/egress direction.

    Children (both presence nodes, ``None`` until explicitly created):
        input  -- subscriber ingress policy.
        output -- subscriber egress policy.
    """

    _prefix = 'qos-ma-bng-cfg'
    _revision = '2016-04-01'

    def __init__(self):
        self.parent = None
        self.input = None
        self.output = None

    class Input(object):
        """Subscriber ingress policy (presence node).

        Leaves (``None`` means unset):
            account_stats -- TRUE to enable account stats for the policy; not
                             supported for subscriber types 'ppp' and
                             'ipsubscriber' (bool).
            merge         -- TRUE to enable merge for the policy (bool).
            merge_id      -- merge ID value (int, 0..255).
            policy_name   -- name of the policy-map (str, mandatory).
            spi_name      -- name of the SPI (str).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.account_stats = None
            self.merge = None
            self.merge_id = None
            self.policy_name = None
            self.spi_name = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:input'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            leaves = (self.account_stats, self.merge, self.merge_id,
                      self.policy_name, self.spi_name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos.ServicePolicy.Input']['meta_info']

    class Output(object):
        """Subscriber egress policy (presence node).

        Leaves (``None`` means unset):
            account_stats -- TRUE to enable account stats for the policy; not
                             supported for subscriber types 'ppp' and
                             'ipsubscriber' (bool).
            merge         -- TRUE to enable merge for the policy (bool).
            merge_id      -- merge ID value (int, 0..255).
            policy_name   -- name of the policy-map (str, mandatory).
            spi_name      -- name of the SPI (str).
        """

        _prefix = 'qos-ma-bng-cfg'
        _revision = '2016-04-01'

        def __init__(self):
            self.parent = None
            self._is_presence = True
            self.account_stats = None
            self.merge = None
            self.merge_id = None
            self.policy_name = None
            self.spi_name = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:output'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            # A presence container reports data as soon as it exists.
            if self._is_presence:
                return True
            leaves = (self.account_stats, self.merge, self.merge_id,
                      self.policy_name, self.spi_name)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
            return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos.ServicePolicy.Output']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-qos-ma-bng-cfg:service-policy'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        children = (self.input, self.output)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta_module
        return meta_module._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos.ServicePolicy']['meta_info']
class Account(object):
    """QoS L2 overhead accounting settings.

    Leaves:
        aal: ATM adaptation layer AAL (Qosl2DataLinkEnum).
        atm_cell_tax: add ATM cell tax to the L2 overhead (Empty).
        encapsulation: encapsulation type (Qosl2EncapEnum).
        user_defined: numeric L2 overhead offset, range -63..63 (int).
    """
    _prefix = 'qos-ma-bng-cfg'
    _revision = '2016-04-01'

    def __init__(self):
        self.parent = None
        self.aal = None
        self.atm_cell_tax = None
        self.encapsulation = None
        self.user_defined = None

    @property
    def _common_path(self):
        """XPath of the account node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-qos-ma-bng-cfg:account'

    def is_config(self):
        """This node carries configuration (not operational) data."""
        return True

    def _has_data(self):
        """Return True when any leaf of this container is set."""
        if not self.is_config():
            return False
        leaves = (self.aal, self.atm_cell_tax, self.encapsulation, self.user_defined)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos.Account']['meta_info']
class Output(object):
    """QoS applied in the egress direction.

    Leaves:
        minimum_bandwidth: minimum subscriber bandwidth in kbit/s,
            range 1..4294967295 (int).
    """
    _prefix = 'qos-ma-bng-cfg'
    _revision = '2016-04-01'

    def __init__(self):
        self.parent = None
        self.minimum_bandwidth = None

    @property
    def _common_path(self):
        """XPath of the output node, derived from the parent's path."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-qos-ma-bng-cfg:output'

    def is_config(self):
        """This node carries configuration (not operational) data."""
        return True

    def _has_data(self):
        """Return True when the single leaf of this container is set."""
        if not self.is_config():
            return False
        return self.minimum_bandwidth is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
        return meta._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos.Output']['meta_info']
@property
def _common_path(self):
    """XPath of the qos node, derived from the parent's path."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/Cisco-IOS-XR-qos-ma-bng-cfg:qos'

def is_config(self):
    """This node carries configuration (not operational) data."""
    return True

def _has_data(self):
    """Return True when any child container holds data."""
    if not self.is_config():
        return False
    children = (self.account, self.output, self.service_policy)
    return any(child is not None and child._has_data() for child in children)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.SubscriberServices.SubscriberService.Qos']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this list entry, keyed by template_name."""
    if self.template_name is None:
        raise YPYModelError('Key property template_name is None')
    return ('/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:subscriber-services'
            '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:subscriber-service'
            '[Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:template-name = '
            + str(self.template_name) + ']')

def is_config(self):
    """This node carries configuration (not operational) data."""
    return True

def _has_data(self):
    """Return True when the key, any child container, or the vrf leaf is set."""
    if not self.is_config():
        return False
    if self.template_name is not None:
        return True
    children = (
        self.ipv4_network,
        self.ipv4_packet_filter,
        self.ipv6_neighbor,
        self.ipv6_network,
        self.ipv6_packet_filter,
        self.pbr,
        self.qos,
        self.span_monitor_sessions,
    )
    if any(child is not None and child._has_data() for child in children):
        return True
    # vrf is a plain leaf, not a container: presence alone counts.
    return self.vrf is not None

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.SubscriberServices.SubscriberService']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the subscriber-services container (no parent needed)."""
    return '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:subscriber-services'

def is_config(self):
    """This node carries configuration (not operational) data."""
    return True

def _has_data(self):
    """Return True when any subscriber-service list entry holds data."""
    if not self.is_config():
        return False
    if self.subscriber_service is None:
        return False
    return any(entry._has_data() for entry in self.subscriber_service)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate.SubscriberServices']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the dynamic-template root (no parent needed)."""
    return '/Cisco-IOS-XR-subscriber-infra-tmplmgr-cfg:dynamic-template'

def is_config(self):
    """This node carries configuration (not operational) data."""
    return True

def _has_data(self):
    """Return True when any child container holds data."""
    if not self.is_config():
        return False
    children = (self.ip_subscribers, self.ppps, self.subscriber_services)
    return any(child is not None and child._has_data() for child in children)

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_infra_tmplmgr_cfg as meta
    return meta._meta_table['DynamicTemplate']['meta_info']
| 40.036089
| 298
| 0.453684
| 23,129
| 270,684
| 5.105322
| 0.016473
| 0.034417
| 0.043021
| 0.033875
| 0.97434
| 0.967133
| 0.962441
| 0.960722
| 0.957284
| 0.955861
| 0
| 0.013747
| 0.479171
| 270,684
| 6,760
| 299
| 40.042012
| 0.823826
| 0.257163
| 0
| 0.925373
| 0
| 0.003124
| 0.110162
| 0.074897
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15446
| false
| 0
| 0.032628
| 0.001388
| 0.490802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b1017c6446e927aeef5fd5cd2045f80f298d788
| 8,494
|
py
|
Python
|
ACDC/preprocess1.py
|
lars76/segmentation_activations
|
e0a52e3d5309dc9aec396ff16afc5b565fd25c2f
|
[
"MIT"
] | null | null | null |
ACDC/preprocess1.py
|
lars76/segmentation_activations
|
e0a52e3d5309dc9aec396ff16afc5b565fd25c2f
|
[
"MIT"
] | null | null | null |
ACDC/preprocess1.py
|
lars76/segmentation_activations
|
e0a52e3d5309dc9aec396ff16afc5b565fd25c2f
|
[
"MIT"
] | null | null | null |
"""
Credits: https://github.com/liut969/Automated-Cardiac-Segmentation-and-Diagnosis/blob/master/roi.py
changed __main__
"""
from skimage.feature import peak_local_max, canny
from skimage.transform import hough_circle
from scipy.fftpack import fftn, ifftn
import numpy as np
import nibabel as nib
import csv
import os
class ROI(object):
    """Locate a circular region of interest (the heart) in 4D cardiac MR
    volumes and write per-patient (center, radii) results to a CSV file.

    Two detection variants are offered; both reduce the time axis of each
    slice to a single "motion" image, then run a Canny + Hough-circle search:

    * :meth:`extract_roi_fft` — magnitude of the first temporal Fourier
      harmonic (highlights periodically moving tissue).
    * :meth:`extract_roi_stddev` — per-pixel standard deviation over time.
    """

    def __init__(self, from_path, save_path, start_num, end_num):
        # from_path: directory containing patientNNN/ subfolders.
        # save_path: CSV output path.
        # [start_num, end_num): patient index range to process.
        self.from_path = from_path
        self.save_path = save_path
        self.start_num = start_num
        self.end_num = end_num

    def load_nii(self, img_path):
        """Load a NIfTI image; return (data array, affine, header)."""
        nimg = nib.load(img_path)
        return nimg.get_data(), nimg.affine, nimg.header

    # ------------------------------------------------------------------
    # Public variants: both delegate to the shared _extract_roi pipeline,
    # differing only in how a slice's time series is collapsed to 2D.
    # ------------------------------------------------------------------
    def extract_roi_fft(self, data4D, pixel_spacing, minradius_mm=15, maxradius_mm=45, kernel_width=5, center_margin=8, num_peaks=10, num_circles=20, radstep=2):
        """Fourier-harmonic based ROI extraction.

        Returns (center, radii) of the ROI in (i, j) pixel coordinates;
        radii is None when no circle survives the center-margin filter.
        """
        return self._extract_roi(data4D, pixel_spacing, self._fft_slice_image,
                                 minradius_mm, maxradius_mm, kernel_width,
                                 center_margin, num_peaks, num_circles, radstep)

    def extract_roi_stddev(self, data4D, pixel_spacing, minradius_mm=15, maxradius_mm=45, kernel_width=5, center_margin=8, num_peaks=10, num_circles=20, radstep=2):
        """Temporal standard-deviation based ROI extraction.

        Returns (center, radii) of the ROI in (i, j) pixel coordinates;
        radii is None when no circle survives the center-margin filter.
        """
        return self._extract_roi(data4D, pixel_spacing, self._stddev_slice_image,
                                 minradius_mm, maxradius_mm, kernel_width,
                                 center_margin, num_peaks, num_circles, radstep)

    @staticmethod
    def _fft_slice_image(data4D, slice_idx):
        """Magnitude of the first temporal Fourier harmonic of one slice."""
        tframes = data4D.shape[3]
        ff1 = fftn([data4D[:, :, slice_idx, t] for t in range(tframes)])
        return np.absolute(ifftn(ff1[1, :, :]))

    @staticmethod
    def _stddev_slice_image(data4D, slice_idx):
        """Per-pixel standard deviation over time for one slice."""
        tframes = data4D.shape[3]
        stack = np.array([data4D[:, :, slice_idx, t] for t in range(tframes)])
        return np.std(stack, axis=0)

    def _extract_roi(self, data4D, pixel_spacing, slice_image_fn, minradius_mm,
                     maxradius_mm, kernel_width, center_margin, num_peaks,
                     num_circles, radstep):
        """Shared Hough-circle ROI pipeline (previously duplicated verbatim
        in extract_roi_fft and extract_roi_stddev)."""
        # Convert the mm radius bounds to pixel counts using the header zooms.
        pixel_spacing_X, pixel_spacing_Y = pixel_spacing[0], pixel_spacing[1]
        minradius = int(minradius_mm / pixel_spacing_X)
        maxradius = int(maxradius_mm / pixel_spacing_Y)

        ximagesize = data4D.shape[0]
        yimagesize = data4D.shape[1]
        zslices = data4D.shape[2]

        xsurface = np.tile(range(ximagesize), (yimagesize, 1)).T
        ysurface = np.tile(range(yimagesize), (ximagesize, 1))
        lsurface = np.zeros((ximagesize, yimagesize))

        allcenters = []
        allaccums = []
        allradii = []
        for slice_idx in range(zslices):  # renamed: `slice` shadowed a builtin
            fh = slice_image_fn(data4D, slice_idx)
            # Suppress weak responses, then normalize to [0, 1].
            fh[fh < 0.1 * np.max(fh)] = 0.0
            image = 1. * fh / np.max(fh)

            # Find Hough circles over the allowed radius range.
            edges = canny(image, sigma=3)
            hough_radii = np.arange(minradius, maxradius, radstep)
            hough_res = hough_circle(edges, hough_radii)
            if not hough_res.any():
                continue

            centers = []
            accums = []
            radii = []
            for radius, h in zip(hough_radii, hough_res):
                # For each radius, extract up to num_peaks circle centers.
                peaks = peak_local_max(h, num_peaks=num_peaks)
                centers.extend(peaks)
                accums.extend(h[peaks[:, 0], peaks[:, 1]])
                # BUG FIX: peak_local_max may return FEWER than num_peaks
                # peaks; pad by the actual count so centers/accums/radii
                # stay index-aligned (the original used num_peaks).
                radii.extend([radius] * len(peaks))

            # Keep the most prominent num_circles circles and accumulate a
            # Gaussian likelihood surface weighted by circle strength.
            for idx in np.argsort(accums)[::-1][:num_circles]:
                center_x, center_y = centers[idx]
                allcenters.append(centers[idx])
                allradii.append(radii[idx])
                allaccums.append(accums[idx])
                brightness = accums[idx]
                lsurface = lsurface + brightness * np.exp(
                    -((xsurface - center_x) ** 2 + (ysurface - center_y) ** 2) / kernel_width ** 2)

        lsurface = lsurface / lsurface.max()
        # Most likely ROI center is the peak of the likelihood surface.
        roi_center = np.unravel_index(lsurface.argmax(), lsurface.shape)

        # ROI radius: largest (radius + offset) among circles whose centers
        # fall within center_margin of the chosen center.
        roi_x_radius = 0
        roi_y_radius = 0
        for idx in range(len(allcenters)):
            xshift = np.abs(allcenters[idx][0] - roi_center[0])
            yshift = np.abs(allcenters[idx][1] - roi_center[1])
            if (xshift <= center_margin) & (yshift <= center_margin):
                roi_x_radius = np.max((roi_x_radius, allradii[idx] + xshift))
                roi_y_radius = np.max((roi_y_radius, allradii[idx] + yshift))

        if roi_x_radius > 0 and roi_y_radius > 0:
            roi_radii = roi_x_radius, roi_y_radius
        else:
            roi_radii = None
        return roi_center, roi_radii

    def save_csv(self):
        """Run FFT-based ROI extraction for each patient in range and write
        one (image_path, center, radii) row per patient to save_path."""
        headers = ['image_path', 'center', 'radii']
        rows = []
        for i in range(self.start_num, self.end_num):
            image_path_4D = os.path.join(self.from_path, 'patient%03d/patient%03d_4d.nii.gz' % (i, i))
            image_4D, _, hdr = self.load_nii(image_path_4D)
            # c, r = self.extract_roi_stddev(image_4D, hdr.get_zooms())
            c, r = self.extract_roi_fft(image_4D, hdr.get_zooms())
            print(image_path_4D, c, r)
            rows.append([image_path_4D, c, r])
        with open(self.save_path, 'w', newline='') as f:
            f_csv = csv.writer(f)
            f_csv.writerow(headers)
            f_csv.writerows(rows)
if __name__ == '__main__':
    # Compute ROIs for the 100 training patients and write them to CSV.
    ROI('training', './train_center_radii.csv', 1, 101).save_csv()
    # roi_test = ROI('../data/testing', './test_center_radii.csv', 101, 151)
    # roi_test.save_csv()
| 41.033816
| 165
| 0.57523
| 1,056
| 8,494
| 4.407197
| 0.183712
| 0.030941
| 0.021487
| 0.015471
| 0.805114
| 0.774817
| 0.774817
| 0.774817
| 0.755479
| 0.755479
| 0
| 0.020218
| 0.318696
| 8,494
| 207
| 166
| 41.033816
| 0.783999
| 0.127973
| 0
| 0.712329
| 0
| 0
| 0.012957
| 0.007774
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034247
| false
| 0
| 0.047945
| 0
| 0.109589
| 0.006849
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1b79623671c9933766596e717331f6ed4a323b33
| 156
|
py
|
Python
|
fmdtools/resultdisp/__init__.py
|
DesignEngrLab/fmdkit
|
2d87c415c036f44fe10310500788f5ab697e618d
|
[
"MIT"
] | 8
|
2020-07-10T19:58:11.000Z
|
2022-01-06T18:47:09.000Z
|
fmdtools/resultdisp/__init__.py
|
nasa/fmdtools
|
7415068776998ff05eb199c78531ee7f9c2422e7
|
[
"MIT"
] | 10
|
2020-07-10T23:17:59.000Z
|
2021-05-20T13:31:43.000Z
|
fmdtools/resultdisp/__init__.py
|
nasa/fmdtools
|
7415068776998ff05eb199c78531ee7f9c2422e7
|
[
"MIT"
] | 3
|
2020-02-06T21:02:20.000Z
|
2022-03-01T08:59:37.000Z
|
from fmdtools.resultdisp import tabulate
from fmdtools.resultdisp import process
from fmdtools.resultdisp import graph
from fmdtools.resultdisp import plot
| 31.2
| 40
| 0.871795
| 20
| 156
| 6.8
| 0.4
| 0.352941
| 0.647059
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 156
| 5
| 41
| 31.2
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1bbb1e3c6be9f17d4fcbdf4944dfb366cecf6d04
| 1,319
|
py
|
Python
|
test/bibliopixel/layout/layout_matrix_test.py
|
rec/leds
|
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
|
[
"MIT"
] | 253
|
2015-01-03T23:17:57.000Z
|
2021-12-14T02:31:08.000Z
|
test/bibliopixel/layout/layout_matrix_test.py
|
rec/leds
|
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
|
[
"MIT"
] | 879
|
2015-01-11T16:07:25.000Z
|
2021-12-10T16:24:31.000Z
|
test/bibliopixel/layout/layout_matrix_test.py
|
rec/leds
|
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
|
[
"MIT"
] | 71
|
2015-01-04T01:02:47.000Z
|
2022-03-25T18:30:10.000Z
|
import unittest
from bibliopixel.layout.geometry import matrix
class MatrixTest(unittest.TestCase):
def test_simple(self):
m = matrix.Matrix(list(range(12)), 3)
self.assertEqual(m.get(0, 0), 0)
self.assertEqual(m.get(1, 0), 1)
self.assertEqual(m.get(0, 1), 3)
self.assertEqual(m.get(1, 1), 4)
self.assertEqual(m.get(2, 3), 11)
def test_transpose(self):
m = matrix.Matrix(list(range(12)), 3, transpose=True)
self.assertEqual(m.get(0, 0), 0)
self.assertEqual(m.get(0, 1), 1)
self.assertEqual(m.get(1, 0), 3)
self.assertEqual(m.get(1, 1), 4)
self.assertEqual(m.get(3, 2), 11)
def test_reflect(self):
m = matrix.Matrix(list(range(12)), 3, reflect_x=True)
self.assertEqual(m.get(0, 0), 2)
self.assertEqual(m.get(1, 0), 1)
self.assertEqual(m.get(0, 1), 5)
self.assertEqual(m.get(1, 1), 4)
self.assertEqual(m.get(2, 3), 9)
def test_serpentine(self):
m = matrix.Matrix(list(range(12)), 3, serpentine_x=True)
self.assertEqual(m.get(0, 0), 0)
self.assertEqual(m.get(1, 0), 1)
self.assertEqual(m.get(0, 1), 5)
self.assertEqual(m.get(1, 1), 4)
self.assertEqual(m.get(2, 2), 8)
self.assertEqual(m.get(2, 3), 9)
| 34.710526
| 64
| 0.585292
| 211
| 1,319
| 3.630332
| 0.161137
| 0.411227
| 0.438642
| 0.520888
| 0.763708
| 0.762402
| 0.732376
| 0.70235
| 0.515666
| 0.515666
| 0
| 0.077
| 0.24185
| 1,319
| 37
| 65
| 35.648649
| 0.689
| 0
| 0
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.65625
| 1
| 0.125
| false
| 0
| 0.0625
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9449c06d0d198411378bc208a6fad5ff413b3bb5
| 7,361
|
py
|
Python
|
forums/tests/test_create.py
|
bhokin/ThaiMoung
|
afac52ad1fefe2c78b6d69b2b08c63aee699ee20
|
[
"MIT"
] | 1
|
2022-02-13T04:31:00.000Z
|
2022-02-13T04:31:00.000Z
|
forums/tests/test_create.py
|
XOQDY/ThaiMoung
|
afac52ad1fefe2c78b6d69b2b08c63aee699ee20
|
[
"MIT"
] | 40
|
2021-10-13T07:30:30.000Z
|
2022-01-30T08:24:14.000Z
|
forums/tests/test_create.py
|
XOQDY/ThaiMoung
|
afac52ad1fefe2c78b6d69b2b08c63aee699ee20
|
[
"MIT"
] | 3
|
2021-10-31T17:58:58.000Z
|
2021-11-12T17:05:14.000Z
|
"""Tests for create methods."""
from django.test import TestCase
from django.urls import reverse
from forums.views import *
class CreateTopic(TestCase):
    """Tests for creating a topic via the create_topic view."""

    def setUp(self):
        """Register a known user so individual tests can opt in to logging in."""
        self.credentials = {
            'username': 'tester',
            'password': 'secret123456',
            'email': 'testerman@gmail.com'}
        User.objects.create_user(**self.credentials)

    def test_create_topic_without_login(self):
        # Anonymous posts are redirected to registration; nothing is created.
        payload = {'topic_name': 'Batman', 'category': 'movie'}
        response = self.client.post(reverse('create_topic'), payload, follow=True)
        self.assertRedirects(response, '/accounts/register_user')
        self.assertEqual(Topic.objects.count(), 0)

    def test_create_topic_with_login(self):
        # A logged-in user creates the topic and lands on the forum index.
        self.client.login(username='tester', password='secret123456')
        payload = {'topic_name': 'Batman', 'category': 'movie'}
        response = self.client.post(reverse('create_topic'), payload, follow=True)
        self.assertRedirects(response, '/forums/')
        self.assertEqual(Topic.objects.count(), 1)
class CreatePost(TestCase):
    """Tests for creating a post via the create_forum view."""

    def setUp(self):
        """Create a user and a topic that posts can be attached to."""
        self.credentials = {
            'username': 'tester',
            'password': 'secret123456',
            'email': 'testerman@gmail.com'}
        self.user = User.objects.create_user(**self.credentials)
        self.topic = Topic.objects.create(topic_name='Batman', category='movie')

    def test_create_post_without_login(self):
        """An anonymous user is redirected to registration; no post is saved."""
        response = self.client.post(reverse('create_forum'),
                                    {'title': 'Batman 007',
                                     'description': 'How do you feel after watching the movie?',
                                     'topic': self.topic.topic_name,
                                     'user': self.user},
                                    follow=True)
        self.assertRedirects(response, '/accounts/register_user')
        self.assertEqual(Post.objects.count(), 0)

    # NOTE(review): logged-in post creation and duplicate-topic tests were
    # committed as commented-out code and have been removed as dead code.
    # Restore them from version history and fix if that coverage is needed.
class CreateComment(TestCase):
    """Tests for creating a comment on a post.

    (Docstring fixed: it previously said "Tests for create topic", a
    copy-paste leftover from CreateTopic.)
    """

    def setUp(self):
        """Create a user, a topic and a post that comments can target."""
        self.credentials = {
            'username': 'tester',
            'password': 'secret123456',
            'email': 'testerman@gmail.com'}
        self.user = User.objects.create_user(**self.credentials)
        self.topic = Topic.objects.create(topic_name='Batman', category='movie')
        self.post = Post.objects.create(title='Batman 5',
                                        description='How do you feel after watching the movie?',
                                        topic=self.topic,
                                        user=self.user)

    def test_create_comment_without_login(self):
        """An anonymous user is redirected to registration; no comment is saved."""
        response = self.client.post(reverse('create_comment', args=[self.post.pk]),
                                    {'post': self.post,
                                     'description': 'Hello',
                                     'user': self.user},
                                    follow=True)
        self.assertRedirects(response, '/accounts/register_user')
        self.assertEqual(Comment.objects.count(), 0)

    def test_create_comment_with_login(self):
        """A logged-in user can comment and is redirected to the post detail."""
        self.client.login(username='tester', password='secret123456')
        response = self.client.post(reverse('create_comment', args=[self.post.pk]),
                                    {'post': self.post,
                                     'description': 'Hello',
                                     'user': self.user},
                                    follow=True)
        self.assertRedirects(response, f'/forums/detail/{self.post.pk}/')
        self.assertEqual(Comment.objects.count(), 1)
class CreateReply(TestCase):
    """Tests for creating a reply to a comment.

    (Docstring fixed: it previously said "Tests for create topic", a
    copy-paste leftover from CreateTopic. The test method names below still
    say "comment" — kept to avoid changing test IDs, but they exercise the
    create_reply view.)
    """

    def setUp(self):
        """Create a user, topic, post and comment that replies can target."""
        self.credentials = {
            'username': 'tester',
            'password': 'secret123456',
            'email': 'testerman@gmail.com'}
        self.user = User.objects.create_user(**self.credentials)
        self.topic = Topic.objects.create(topic_name='Batman', category='movie')
        self.post = Post.objects.create(title='Batman 5',
                                        description='How do you feel after watching the movie?',
                                        topic=self.topic,
                                        user=self.user)
        self.comment = Comment.objects.create(post=self.post,
                                              description='Hello',
                                              user=self.user)

    def test_create_comment_without_login(self):
        """An anonymous user is redirected to registration; no reply is saved."""
        response = self.client.post(reverse('create_reply', args=[self.post.pk, self.comment.pk]),
                                    {'comment': self.comment,
                                     'description': 'Hooray',
                                     'user': self.user},
                                    follow=True)
        self.assertRedirects(response, '/accounts/register_user')
        self.assertEqual(Reply.objects.count(), 0)

    def test_create_comment_with_login(self):
        """A logged-in user can reply and is redirected to the post detail."""
        self.client.login(username='tester', password='secret123456')
        response = self.client.post(reverse('create_reply', args=[self.post.pk, self.comment.pk]),
                                    {'comment': self.comment,
                                     'description': 'Hooray',
                                     'user': self.user},
                                    follow=True)
        self.assertRedirects(response, f'/forums/detail/{self.post.pk}/')
        self.assertEqual(Reply.objects.count(), 1)
| 47.490323
| 110
| 0.535253
| 700
| 7,361
| 5.538571
| 0.125714
| 0.037142
| 0.05107
| 0.078927
| 0.871034
| 0.833118
| 0.794945
| 0.794945
| 0.787722
| 0.779469
| 0
| 0.014996
| 0.338677
| 7,361
| 154
| 111
| 47.798701
| 0.78143
| 0.215867
| 0
| 0.752475
| 0
| 0
| 0.15927
| 0.026662
| 0
| 0
| 0
| 0
| 0.138614
| 1
| 0.108911
| false
| 0.069307
| 0.029703
| 0
| 0.178218
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
849c02618c73f34cfab41d3faded0d8ca2423856
| 1,513
|
py
|
Python
|
src/test.py
|
DavRack/final-ADA-2021-2
|
019d544f7d49c87ec32c989b0e1415648c7220ee
|
[
"MIT"
] | null | null | null |
src/test.py
|
DavRack/final-ADA-2021-2
|
019d544f7d49c87ec32c989b0e1415648c7220ee
|
[
"MIT"
] | null | null | null |
src/test.py
|
DavRack/final-ADA-2021-2
|
019d544f7d49c87ec32c989b0e1415648c7220ee
|
[
"MIT"
] | null | null | null |
import slow, fast
import time
import copy
import test500
# Benchmark slow.order_products vs fast.order_products over growing inputs.
cases = [10, 20, 50, 100, 200, 300, 400, 500, 1000]
resoults = []


def get_ms():
    """Current wall-clock time in whole milliseconds."""
    return round(time.time() * 1000)


for case in cases:
    atributes = test500.atributes
    products = test500.products[:case]

    # Each implementation runs 5 times; report the average per-run time.
    time_fast1 = get_ms()
    for _ in range(5):
        ordered_products_fast = fast.order_products(copy.deepcopy(products), copy.deepcopy(atributes))
    time_fast2 = get_ms()

    time_slow1 = get_ms()
    for _ in range(5):
        ordered_products_slow = slow.order_products(copy.deepcopy(products), copy.deepcopy(atributes))
    time_slow2 = get_ms()

    resoults.append({"case numbers": case,
                     "slow": (time_slow2 - time_slow1) / 5,
                     "fast": (time_fast2 - time_fast1) / 5})

for r in resoults:
    print(r)
| 39.815789
| 109
| 0.757436
| 198
| 1,513
| 5.570707
| 0.19697
| 0.217588
| 0.362647
| 0.226655
| 0.705349
| 0.705349
| 0.705349
| 0.705349
| 0.705349
| 0.698096
| 0
| 0.036172
| 0.122935
| 1,513
| 37
| 110
| 40.891892
| 0.795026
| 0
| 0
| 0.357143
| 0
| 0
| 0.013219
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.142857
| 0.035714
| 0.214286
| 0.035714
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
84b02aa71336f64fb5835f81b4d7a5fd881b07f6
| 83
|
py
|
Python
|
src/browserist/helper/operating_system.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | 2
|
2022-02-20T10:03:19.000Z
|
2022-03-22T11:17:10.000Z
|
src/browserist/helper/operating_system.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | null | null | null |
src/browserist/helper/operating_system.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | null | null | null |
import sys
def is_windows() -> bool:
    """Report whether the interpreter is running on a Windows platform."""
    platform_id = sys.platform
    return platform_id.startswith("win32")
| 13.833333
| 43
| 0.698795
| 11
| 83
| 5.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0.168675
| 83
| 5
| 44
| 16.6
| 0.797101
| 0
| 0
| 0
| 0
| 0
| 0.060241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
84e8578c8af88d74d8ca7e781b37c7a36fea279a
| 17,498
|
py
|
Python
|
bgRemove.py
|
SihanGunarathne/ImageEditor
|
a9329d7aa0c4c59e4e840274b5d16025aba59512
|
[
"MIT"
] | null | null | null |
bgRemove.py
|
SihanGunarathne/ImageEditor
|
a9329d7aa0c4c59e4e840274b5d16025aba59512
|
[
"MIT"
] | null | null | null |
bgRemove.py
|
SihanGunarathne/ImageEditor
|
a9329d7aa0c4c59e4e840274b5d16025aba59512
|
[
"MIT"
] | null | null | null |
from tkinter import Toplevel, Label, Scale, Button, HORIZONTAL, RIGHT
import cv2 # for image processing
import numpy as np
class BgRemove(Toplevel):
def __init__(self, master=None):
    """Build the background-removal dialog and wire up its buttons."""
    Toplevel.__init__(self, master=master)
    self.original_image = self.master.processed_image
    self.bgremoved_image = None

    # (attribute name, label, release callback) — creation order matters
    # only for widget bookkeeping; layout is controlled by the pack calls.
    button_specs = [
        ("bgremove_button", "Remove Background", self.bgremove_button_released),
        ("addbg1_button", "Background1", self.addbg1_button_released),
        ("addbg2_button", "Background2", self.addbg2_button_released),
        ("addbg3_button", "Background3", self.addbg3_button_released),
        ("addbg4_button", "Background4", self.addbg4_button_released),
        ("cancel_button", "Cancel", self.cancel_button_released),
        ("apply_button", "Apply", self.apply_button_released),
    ]
    for attr, label, callback in button_specs:
        button = Button(master=self, text=label)
        button.bind("<ButtonRelease>", callback)
        setattr(self, attr, button)

    self.bgremove_button.pack()
    self.addbg1_button.pack()
    self.addbg2_button.pack()
    self.addbg3_button.pack()
    self.addbg4_button.pack()
    self.cancel_button.pack(side=RIGHT)
    self.apply_button.pack()
def bgremove_button_released(self, event):
    """Strip the background and refresh the preview."""
    self.bgremove()
    self.show_image()

def addbg1_button_released(self, event):
    """Apply background 1 and refresh the preview."""
    self.addbg1()
    self.show_image()

def addbg2_button_released(self, event):
    """Apply background 2 and refresh the preview."""
    self.addbg2()
    self.show_image()

def addbg3_button_released(self, event):
    """Apply background 3 and refresh the preview."""
    self.addbg3()
    self.show_image()

def addbg4_button_released(self, event):
    """Apply background 4 and refresh the preview."""
    self.addbg4()
    self.show_image()

def apply_button_released(self, event):
    """Commit the edited image to the editor and close the dialog."""
    self.master.processed_image = self.bgremoved_image
    self.show_image()
    self.close()

def cancel_button_released(self, event):
    """Discard the edit, restore the editor's view and close the dialog."""
    self.master.image_viewer.show_image()
    self.close()

def show_image(self):
    """Display the current background-removed image in the main viewer."""
    self.master.image_viewer.show_image(img=self.bgremoved_image)
def bgremove(self):
    """Compute a foreground-only version of the original image.

    Pipeline: resize -> blur -> grayscale -> inverse binary threshold ->
    largest contour -> filled mask -> smoothed 3-channel mask -> masked
    foreground. The result (960x540, uint8) is stored in
    ``self.bgremoved_image``.
    """
    image1 = cv2.resize(self.original_image, (960, 540))
    # Smooth out edges and reduce noise before thresholding.
    blurred = cv2.GaussianBlur(image1, (5, 5), 0)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    # BUG FIX: the original passed cv2.CHAIN_APPROX_NONE (a contour-
    # approximation flag whose value is 1) as the threshold type; that
    # value coincides with cv2.THRESH_BINARY_INV, so spell the intended
    # flag explicitly — numeric behavior is unchanged.
    _, gray = cv2.threshold(gray, 220, 255, cv2.THRESH_BINARY_INV)
    binary = gray.astype(np.uint8)
    contours, _ = cv2.findContours(
        binary,
        cv2.RETR_TREE,
        cv2.CHAIN_APPROX_SIMPLE
    )
    # Keep only the largest contour area in the image.
    contour = max(contours, key=cv2.contourArea)
    # Fill the largest contour into a white-on-black mask the same size
    # as the grayscale image.
    mask = np.zeros_like(gray)
    cv2.fillPoly(mask, [contour], 255)
    res_mask = np.copy(mask)
    res_mask[mask == 0] = cv2.GC_BGD  # obvious background pixels
    # NOTE: the original also wrote GC_PR_BGD to the mask==255 pixels
    # first; that store was immediately overwritten by GC_FGD, so the
    # dead assignment has been removed.
    res_mask[mask == 255] = cv2.GC_FGD  # obvious foreground pixels
    # White where the pixel is (probably) foreground, black elsewhere.
    mask2 = np.where(
        (res_mask == cv2.GC_FGD) | (res_mask == cv2.GC_PR_FGD),
        255,
        0
    ).astype('uint8')
    # Expand to 3 channels and smooth the mask edges with a Gaussian blur.
    mask3d = np.repeat(mask2[:, :, np.newaxis], 3, axis=2)
    mask3d[mask3d > 0] = 255.0
    mask3d[mask3d > 255] = 255.0
    mask3d = cv2.GaussianBlur(mask3d, (5, 5), 0)
    # Zero out background pixels to obtain the extracted foreground.
    foreground = np.copy(image1).astype(float)
    foreground[mask2 == 0] = 0
    self.bgremoved_image = cv2.resize(foreground.astype(np.uint8), (960, 540))
def addbg1(self):
    """Replace the image background with 'input/background1.jpg'.

    Segments the subject by thresholding the grayscale image, keeping
    the largest contour as the foreground mask, then alpha-blends the
    foreground over the new background. The composite (resized to
    960x540) is stored in ``self.bgremoved_image``.
    """
    image1 = cv2.resize(self.original_image, (960, 540))
    # Blur to smooth out the edges a bit and reduce noise.
    blurred = cv2.GaussianBlur(image1, (5, 5), 0)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    # BUG FIX: the threshold type was cv2.CHAIN_APPROX_NONE — a contour
    # flag whose value selects THRESH_BINARY_INV, inverting the mask.
    # The stated intent is: pixels above 200 -> 255, the rest -> 0,
    # which is THRESH_BINARY.
    ret, gray = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
    image2 = gray.astype(np.uint8)
    contours, hierarchy = cv2.findContours(
        image2,
        cv2.RETR_TREE,
        cv2.CHAIN_APPROX_SIMPLE
    )
    # The largest contour is assumed to outline the subject.
    contour = max(contours, key=cv2.contourArea)
    # Black mask the size of the grayscale image; pixels inside the
    # largest contour are filled white.
    mask = np.zeros_like(gray)
    cv2.fillPoly(mask, [contour], 255)
    res_mask = np.copy(mask)
    res_mask[mask == 0] = cv2.GC_BGD    # obvious background pixels
    # NOTE: a GC_PR_BGD assignment to the same pixels existed here but
    # was immediately overwritten by the next line (dead code, removed).
    res_mask[mask == 255] = cv2.GC_FGD  # obvious foreground pixels
    # White where the foreground is certain or probable, black elsewhere.
    mask2 = np.where(
        (res_mask == cv2.GC_FGD) | (res_mask == cv2.GC_PR_FGD),
        255,
        0
    ).astype('uint8')
    # 3-channel version of `mask2` so it can blend with the BGR image.
    mask3d = np.repeat(mask2[:, :, np.newaxis], 3, axis=2)
    mask3d[mask3d > 0] = 255
    # Gaussian blur softens the mask edges for a smoother composite.
    mask3d = cv2.GaussianBlur(mask3d, (5, 5), 0)
    # Zero out the foreground where the mask is black.
    foreground = np.copy(image1).astype(float)
    foreground[mask2 == 0] = 0
    # Normalize the mask to [0, 1] and scale the foreground by it.
    mask3d = mask3d / 255.0
    foreground = cv2.multiply(mask3d, foreground)
    # Read the new background and resize it to match the foreground.
    background = cv2.imread('input/background1.jpg')
    background = cv2.resize(background,
                            (foreground.shape[1], foreground.shape[0]))
    # BUG FIX: np.float was removed in NumPy 1.24; builtin float is the
    # documented replacement (same dtype, float64).
    background = background.astype(float)
    # Weight the background by the inverse mask and composite the two.
    background = cv2.multiply(1.0 - mask3d, background)
    new_image = cv2.add(foreground, background)
    self.bgremoved_image = cv2.resize(new_image.astype(np.uint8), (960, 540))
def addbg2(self):
    """Replace the image background with 'input/background2.jpg'.

    Segments the subject by thresholding the grayscale image, keeping
    the largest contour as the foreground mask, then alpha-blends the
    foreground over the new background. The composite (resized to
    960x540) is stored in ``self.bgremoved_image``.
    """
    image1 = cv2.resize(self.original_image, (960, 540))
    # Blur to smooth out the edges a bit and reduce noise.
    blurred = cv2.GaussianBlur(image1, (5, 5), 0)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    # BUG FIX: the threshold type was cv2.CHAIN_APPROX_NONE — a contour
    # flag whose value selects THRESH_BINARY_INV, inverting the mask.
    # The intent (pixels above 200 -> 255) is THRESH_BINARY.
    ret, gray = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
    image2 = gray.astype(np.uint8)
    contours, hierarchy = cv2.findContours(
        image2,
        cv2.RETR_TREE,
        cv2.CHAIN_APPROX_SIMPLE
    )
    # The largest contour is assumed to outline the subject.
    contour = max(contours, key=cv2.contourArea)
    # Black mask the size of the grayscale image; pixels inside the
    # largest contour are filled white.
    mask = np.zeros_like(gray)
    cv2.fillPoly(mask, [contour], 255)
    res_mask = np.copy(mask)
    res_mask[mask == 0] = cv2.GC_BGD    # obvious background pixels
    # NOTE: a GC_PR_BGD assignment to the same pixels existed here but
    # was immediately overwritten by the next line (dead code, removed).
    res_mask[mask == 255] = cv2.GC_FGD  # obvious foreground pixels
    # White where the foreground is certain or probable, black elsewhere.
    mask2 = np.where(
        (res_mask == cv2.GC_FGD) | (res_mask == cv2.GC_PR_FGD),
        255,
        0
    ).astype('uint8')
    # 3-channel version of `mask2` so it can blend with the BGR image.
    mask3d = np.repeat(mask2[:, :, np.newaxis], 3, axis=2)
    mask3d[mask3d > 0] = 255
    # Gaussian blur softens the mask edges for a smoother composite.
    mask3d = cv2.GaussianBlur(mask3d, (5, 5), 0)
    # Zero out the foreground where the mask is black.
    foreground = np.copy(image1).astype(float)
    foreground[mask2 == 0] = 0
    # Normalize the mask to [0, 1] and scale the foreground by it.
    mask3d = mask3d / 255.0
    foreground = cv2.multiply(mask3d, foreground)
    # Read the new background and resize it to match the foreground.
    background = cv2.imread('input/background2.jpg')
    background = cv2.resize(background,
                            (foreground.shape[1], foreground.shape[0]))
    # BUG FIX: np.float was removed in NumPy 1.24; use builtin float.
    background = background.astype(float)
    # Weight the background by the inverse mask and composite the two.
    background = cv2.multiply(1.0 - mask3d, background)
    new_image = cv2.add(foreground, background)
    self.bgremoved_image = cv2.resize(new_image.astype(np.uint8), (960, 540))
def addbg3(self):
    """Replace the image background with 'input/background3.jpg'.

    Segments the subject by thresholding the grayscale image, keeping
    the largest contour as the foreground mask, then alpha-blends the
    foreground over the new background. The composite (resized to
    960x540) is stored in ``self.bgremoved_image``.
    """
    image1 = cv2.resize(self.original_image, (960, 540))
    # Blur to smooth out the edges a bit and reduce noise.
    blurred = cv2.GaussianBlur(image1, (5, 5), 0)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    # BUG FIX: the threshold type was cv2.CHAIN_APPROX_NONE — a contour
    # flag whose value selects THRESH_BINARY_INV, inverting the mask.
    # The intent (pixels above 200 -> 255) is THRESH_BINARY.
    ret, gray = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
    image2 = gray.astype(np.uint8)
    contours, hierarchy = cv2.findContours(
        image2,
        cv2.RETR_TREE,
        cv2.CHAIN_APPROX_SIMPLE
    )
    # The largest contour is assumed to outline the subject.
    contour = max(contours, key=cv2.contourArea)
    # Black mask the size of the grayscale image; pixels inside the
    # largest contour are filled white.
    mask = np.zeros_like(gray)
    cv2.fillPoly(mask, [contour], 255)
    res_mask = np.copy(mask)
    res_mask[mask == 0] = cv2.GC_BGD    # obvious background pixels
    # NOTE: a GC_PR_BGD assignment to the same pixels existed here but
    # was immediately overwritten by the next line (dead code, removed).
    res_mask[mask == 255] = cv2.GC_FGD  # obvious foreground pixels
    # White where the foreground is certain or probable, black elsewhere.
    mask2 = np.where(
        (res_mask == cv2.GC_FGD) | (res_mask == cv2.GC_PR_FGD),
        255,
        0
    ).astype('uint8')
    # 3-channel version of `mask2` so it can blend with the BGR image.
    mask3d = np.repeat(mask2[:, :, np.newaxis], 3, axis=2)
    mask3d[mask3d > 0] = 255
    # Gaussian blur softens the mask edges for a smoother composite.
    mask3d = cv2.GaussianBlur(mask3d, (5, 5), 0)
    # Zero out the foreground where the mask is black.
    foreground = np.copy(image1).astype(float)
    foreground[mask2 == 0] = 0
    # Normalize the mask to [0, 1] and scale the foreground by it.
    mask3d = mask3d / 255.0
    foreground = cv2.multiply(mask3d, foreground)
    # Read the new background and resize it to match the foreground.
    background = cv2.imread('input/background3.jpg')
    background = cv2.resize(background,
                            (foreground.shape[1], foreground.shape[0]))
    # BUG FIX: np.float was removed in NumPy 1.24; use builtin float.
    background = background.astype(float)
    # Weight the background by the inverse mask and composite the two.
    background = cv2.multiply(1.0 - mask3d, background)
    new_image = cv2.add(foreground, background)
    self.bgremoved_image = cv2.resize(new_image.astype(np.uint8), (960, 540))
def addbg4(self):
    """Replace the image background with 'input/background4.jpg'.

    Segments the subject by thresholding the grayscale image, keeping
    the largest contour as the foreground mask, then alpha-blends the
    foreground over the new background. The composite (resized to
    960x540) is stored in ``self.bgremoved_image``.
    """
    image1 = cv2.resize(self.original_image, (960, 540))
    # Blur to smooth out the edges a bit and reduce noise.
    blurred = cv2.GaussianBlur(image1, (5, 5), 0)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    # BUG FIX: the threshold type was cv2.CHAIN_APPROX_NONE — a contour
    # flag whose value selects THRESH_BINARY_INV, inverting the mask.
    # The intent (pixels above 200 -> 255) is THRESH_BINARY.
    ret, gray = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
    image2 = gray.astype(np.uint8)
    contours, hierarchy = cv2.findContours(
        image2,
        cv2.RETR_TREE,
        cv2.CHAIN_APPROX_SIMPLE
    )
    # The largest contour is assumed to outline the subject.
    contour = max(contours, key=cv2.contourArea)
    # Black mask the size of the grayscale image; pixels inside the
    # largest contour are filled white.
    mask = np.zeros_like(gray)
    cv2.fillPoly(mask, [contour], 255)
    res_mask = np.copy(mask)
    res_mask[mask == 0] = cv2.GC_BGD    # obvious background pixels
    # NOTE: a GC_PR_BGD assignment to the same pixels existed here but
    # was immediately overwritten by the next line (dead code, removed).
    res_mask[mask == 255] = cv2.GC_FGD  # obvious foreground pixels
    # White where the foreground is certain or probable, black elsewhere.
    mask2 = np.where(
        (res_mask == cv2.GC_FGD) | (res_mask == cv2.GC_PR_FGD),
        255,
        0
    ).astype('uint8')
    # 3-channel version of `mask2` so it can blend with the BGR image.
    mask3d = np.repeat(mask2[:, :, np.newaxis], 3, axis=2)
    mask3d[mask3d > 0] = 255
    # Gaussian blur softens the mask edges for a smoother composite.
    mask3d = cv2.GaussianBlur(mask3d, (5, 5), 0)
    # Zero out the foreground where the mask is black.
    foreground = np.copy(image1).astype(float)
    foreground[mask2 == 0] = 0
    # Normalize the mask to [0, 1] and scale the foreground by it.
    mask3d = mask3d / 255.0
    foreground = cv2.multiply(mask3d, foreground)
    # Read the new background and resize it to match the foreground.
    background = cv2.imread('input/background4.jpg')
    background = cv2.resize(background,
                            (foreground.shape[1], foreground.shape[0]))
    # BUG FIX: np.float was removed in NumPy 1.24; use builtin float.
    background = background.astype(float)
    # Weight the background by the inverse mask and composite the two.
    background = cv2.multiply(1.0 - mask3d, background)
    new_image = cv2.add(foreground, background)
    self.bgremoved_image = cv2.resize(new_image.astype(np.uint8), (960, 540))
def close(self):
    """Close this window by destroying the underlying widget."""
    self.destroy()
| 42.0625
| 94
| 0.630986
| 2,246
| 17,498
| 4.80187
| 0.086376
| 0.019471
| 0.015299
| 0.021326
| 0.874363
| 0.837459
| 0.82077
| 0.806305
| 0.806305
| 0.806305
| 0
| 0.054442
| 0.267288
| 17,498
| 415
| 95
| 42.163855
| 0.786756
| 0.28969
| 0
| 0.731225
| 0
| 0
| 0.023226
| 0.006822
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059289
| false
| 0
| 0.011858
| 0
| 0.075099
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
84ebbeca1c5131f18b021fbc9baceb2b70863ae3
| 12,650
|
py
|
Python
|
scikits/crab/metrics/tests/test_classes.py
|
CrazyCaspian/crab
|
beb355538acc419b82beae3c6845d1e1cff5d26b
|
[
"BSD-3-Clause"
] | 863
|
2015-01-01T05:09:14.000Z
|
2022-03-27T05:05:25.000Z
|
scikits/crab/metrics/tests/test_classes.py
|
qqwjq/crab
|
beb355538acc419b82beae3c6845d1e1cff5d26b
|
[
"BSD-3-Clause"
] | 21
|
2015-01-02T15:42:46.000Z
|
2018-08-18T16:02:03.000Z
|
scikits/crab/metrics/tests/test_classes.py
|
qqwjq/crab
|
beb355538acc419b82beae3c6845d1e1cff5d26b
|
[
"BSD-3-Clause"
] | 318
|
2015-01-01T13:13:04.000Z
|
2022-03-29T11:38:08.000Z
|
from nose.tools import assert_equals, assert_almost_equals, assert_raises, assert_true
from ...similarities.basic_similarities import UserSimilarity
from ...metrics.pairwise import euclidean_distances, jaccard_coefficient
from ...models.classes import MatrixPreferenceDataModel, \
MatrixBooleanPrefDataModel
from ...recommenders.knn import UserBasedRecommender
from ..classes import CfEvaluator
from ...recommenders.knn.neighborhood_strategies import NearestNeighborsStrategy
# Shared test fixture: movie ratings keyed by user name, then by movie
# title. 'Maria Gabriela' has no ratings — presumably included to
# exercise the empty-preferences edge case (TODO confirm against the
# evaluator's handling of empty users).
movies = {'Marcel Caraciolo': {'Lady in the Water': 2.5, 'Snakes on a Plane': 3.5,
          'Just My Luck': 3.0, 'Superman Returns': 3.5, 'You, Me and Dupree': 2.5,
          'The Night Listener': 3.0},
          'Luciana Nunes': {'Lady in the Water': 3.0, 'Snakes on a Plane': 3.5,
          'Just My Luck': 1.5, 'Superman Returns': 5.0, 'The Night Listener': 3.0,
          'You, Me and Dupree': 3.5},
          'Leopoldo Pires': {'Lady in the Water': 2.5, 'Snakes on a Plane': 3.0,
          'Superman Returns': 3.5, 'The Night Listener': 4.0},
          'Lorena Abreu': {'Snakes on a Plane': 3.5, 'Just My Luck': 3.0,
          'The Night Listener': 4.5, 'Superman Returns': 4.0,
          'You, Me and Dupree': 2.5},
          'Steve Gates': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0,
          'Just My Luck': 2.0, 'Superman Returns': 3.0, 'The Night Listener': 3.0,
          'You, Me and Dupree': 2.0},
          'Sheldom': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0,
          'The Night Listener': 3.0, 'Superman Returns': 5.0, 'You, Me and Dupree': 3.5},
          'Penny Frewman': {'Snakes on a Plane': 4.5, 'You, Me and Dupree': 1.0,
          'Superman Returns': 4.0},
          'Maria Gabriela': {}}
# Two data models built from the same fixture: one rating-based, one
# boolean (presence/absence of a preference).
model = MatrixPreferenceDataModel(movies)
boolean_model = MatrixBooleanPrefDataModel(movies)
# User-user similarities: euclidean distance for the rating model,
# jaccard coefficient for the boolean model.
similarity = UserSimilarity(model, euclidean_distances)
boolean_similarity = UserSimilarity(boolean_model, jaccard_coefficient)
neighborhood = NearestNeighborsStrategy()
# The two recommenders exercised by the tests below.
recsys = UserBasedRecommender(model, similarity, neighborhood)
boolean_recsys = UserBasedRecommender(boolean_model, boolean_similarity, neighborhood)
def test_root_CfEvaluator_evaluate():
    """Check evaluate method in CfEvaluator.

    Runs the same battery of checks against both the rating-based
    and the boolean recommender:
      * an unknown metric name raises ValueError;
      * each metric requested individually yields a score in [0, 1];
      * requesting no metric returns all scores at once, all in [0, 1];
      * sampling users/ratings still yields a valid score.

    BUG FIX: the boolean section previously evaluated `recsys` instead
    of `boolean_recsys` for the all-metrics call, so the boolean
    recommender's combined scores were never actually tested.
    """
    evaluator = CfEvaluator()
    metrics = ('rmse', 'mae', 'nmae', 'precision', 'recall', 'f1score')
    for recommender in (recsys, boolean_recsys):
        # An invalid metric name must be rejected.
        assert_raises(ValueError, evaluator.evaluate, recommender, 'rank')
        # Each metric requested individually is a normalized score.
        for metric in metrics:
            result = evaluator.evaluate(recommender, metric, permutation=False)
            assert_true(0.0 <= result[metric] <= 1.0)
        # With no metric specified, all scores come back together.
        all_scores = evaluator.evaluate(recommender, permutation=False)
        for metric in metrics:
            assert_true(0.0 <= all_scores[metric] <= 1.0)
        # With values at sampling.
        nmae = evaluator.evaluate(recommender, 'nmae', permutation=False,
                                  sampling_users=0.6, sampling_ratings=0.6)
        assert_true(0.0 <= nmae['nmae'] <= 1.0)
def test_root_CfEvaluator_evaluate_on_split():
    """Check evaluate_on_split method in CfEvaluator.

    For both the rating-based and the boolean recommender, verifies
    that an unknown metric raises ValueError, that every per-fold
    score lies in [0, 1], and that the aggregated fold statistics do
    too — for each metric individually and for the all-metrics call.
    """
    evaluator = CfEvaluator()
    error_metrics = ('rmse', 'mae', 'nmae')
    ir_metrics = ('precision', 'recall', 'f1score')

    def check_folds(result, section, keys):
        # Every fold's score for every requested metric is normalized.
        for fold in result[0][section]:
            for key in keys:
                assert_true(0.0 <= fold[key] <= 1.0)

    def check_summary(result, keys):
        # NOTE(review): mirrors the original assertions exactly — the
        # lower bound is checked on 'avg' but the upper bound on
        # 'stdev'; possibly both were meant to be 'avg'. Preserved.
        final = result[1]['final_error']
        for key in keys:
            assert_true(final['avg'][key] >= 0.0 and
                        final['stdev'][key] <= 1.0)

    for recommender in (recsys, boolean_recsys):
        # An invalid metric name must be rejected.
        assert_raises(ValueError, evaluator.evaluate_on_split,
                      recommender, 'rank')
        # Error metrics report per-fold values under 'error'.
        for metric in error_metrics:
            result = evaluator.evaluate_on_split(recommender, metric,
                                                 permutation=False)
            check_folds(result, 'error', (metric,))
            check_summary(result, (metric,))
        # IR statistics report per-fold values under 'ir'.
        for metric in ir_metrics:
            result = evaluator.evaluate_on_split(recommender, metric,
                                                 permutation=False)
            check_folds(result, 'ir', (metric,))
            check_summary(result, (metric,))
        # With no metric specified, both sections are populated.
        all_scores = evaluator.evaluate_on_split(recommender,
                                                 permutation=False)
        check_folds(all_scores, 'ir', ir_metrics)
        check_folds(all_scores, 'error', error_metrics)
        check_summary(all_scores, ir_metrics + error_metrics)
| 51.008065
| 91
| 0.62585
| 1,821
| 12,650
| 4.204283
| 0.061505
| 0.097962
| 0.048328
| 0.065831
| 0.866641
| 0.840648
| 0.82001
| 0.817529
| 0.8157
| 0.806034
| 0
| 0.046534
| 0.184585
| 12,650
| 247
| 92
| 51.214575
| 0.695686
| 0.031779
| 0
| 0.642105
| 0
| 0
| 0.197955
| 0
| 0
| 0
| 0
| 0
| 0.415789
| 1
| 0.010526
| false
| 0
| 0.036842
| 0
| 0.047368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ca169aec4538cb9faef3722f2f5df3f9371775aa
| 91,741
|
py
|
Python
|
tccli/services/dlc/dlc_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 47
|
2018-05-31T11:26:25.000Z
|
2022-03-08T02:12:45.000Z
|
tccli/services/dlc/dlc_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 23
|
2018-06-14T10:46:30.000Z
|
2022-02-28T02:53:09.000Z
|
tccli/services/dlc/dlc_client.py
|
HS-Gray/tencentcloud-cli
|
3822fcfdfed570fb526fe49abe6793e2f9127f4a
|
[
"Apache-2.0"
] | 22
|
2018-10-22T09:49:45.000Z
|
2022-03-30T08:06:04.000Z
|
# -*- coding: utf-8 -*-
import os
import sys
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.dlc.v20210125 import dlc_client as dlc_client_v20210125
from tencentcloud.dlc.v20210125 import models as models_v20210125
from jmespath import search
import time
from tccli import six
def doDetachWorkGroupPolicy(args, parsed_globals):
    """CLI handler for the DLC DetachWorkGroupPolicy API action.

    Builds credentials and an HTTP profile from the parsed global CLI
    options, sends the request built from `args`, optionally polls
    until the configured waiter expression matches, and prints the
    formatted JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )

    # Request timeout defaults to 60s when no --timeout was given.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    # Client and request model classes are selected by API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the SDK version so server logs can identify CLI traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DetachWorkGroupPolicyRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: without --waiter this executes exactly once; with it,
    # the call is retried until the JMESPath expression matches the
    # target value or the waiter timeout elapses.
    while True:
        rsp = client.DetachWorkGroupPolicy(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            # Older interpreters may return bytes here.
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTask(args, parsed_globals):
    """CLI handler for the DLC CreateTask API action.

    Builds credentials and an HTTP profile from the parsed global CLI
    options, sends the request built from `args`, optionally polls
    until the configured waiter expression matches, and prints the
    formatted JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )

    # Request timeout defaults to 60s when no --timeout was given.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    # Client and request model classes are selected by API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the SDK version so server logs can identify CLI traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTaskRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: without --waiter this executes exactly once; with it,
    # the call is retried until the JMESPath expression matches the
    # target value or the waiter timeout elapses.
    while True:
        rsp = client.CreateTask(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            # Older interpreters may return bytes here.
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddUsersToWorkGroup(args, parsed_globals):
    """CLI handler for the DLC AddUsersToWorkGroup API action.

    Builds credentials and an HTTP profile from the parsed global CLI
    options, sends the request built from `args`, optionally polls
    until the configured waiter expression matches, and prints the
    formatted JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role > assumed STS role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )

    # Request timeout defaults to 60s when no --timeout was given.
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    # Client and request model classes are selected by API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the SDK version so server logs can identify CLI traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AddUsersToWorkGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: without --waiter this executes exactly once; with it,
    # the call is retried until the JMESPath expression matches the
    # target value or the waiter timeout elapses.
    while True:
        rsp = client.AddUsersToWorkGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            # Older interpreters may return bytes here.
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTaskResult(args, parsed_globals):
    """CLI handler for the dlc DescribeTaskResult API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DescribeTaskResult
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskResultRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeTaskResult(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteScript(args, parsed_globals):
    """CLI handler for the dlc DeleteScript API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DeleteScript
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteScriptRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteScript(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTable(args, parsed_globals):
    """CLI handler for the dlc CreateTable API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a CreateTable
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTableRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateTable(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUsers(args, parsed_globals):
    """CLI handler for the dlc DescribeUsers API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DescribeUsers
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeUsersRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeUsers(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTasks(args, parsed_globals):
    """CLI handler for the dlc CreateTasks API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a CreateTasks
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTasksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateTasks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeScripts(args, parsed_globals):
    """CLI handler for the dlc DescribeScripts API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DescribeScripts
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeScriptsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeScripts(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteWorkGroup(args, parsed_globals):
    """CLI handler for the dlc DeleteWorkGroup API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DeleteWorkGroup
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteWorkGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteWorkGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateStoreLocation(args, parsed_globals):
    """CLI handler for the dlc CreateStoreLocation API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a CreateStoreLocation
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateStoreLocationRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateStoreLocation(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteUsersFromWorkGroup(args, parsed_globals):
    """CLI handler for the dlc DeleteUsersFromWorkGroup API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DeleteUsersFromWorkGroup
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteUsersFromWorkGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DeleteUsersFromWorkGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateWorkGroup(args, parsed_globals):
    """CLI handler for the dlc CreateWorkGroup API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a CreateWorkGroup
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateWorkGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CreateWorkGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnbindWorkGroupsFromUser(args, parsed_globals):
    """CLI handler for the dlc UnbindWorkGroupsFromUser API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues an UnbindWorkGroupsFromUser
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UnbindWorkGroupsFromUserRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.UnbindWorkGroupsFromUser(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAttachWorkGroupPolicy(args, parsed_globals):
    """CLI handler for the dlc AttachWorkGroupPolicy API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues an AttachWorkGroupPolicy
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AttachWorkGroupPolicyRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.AttachWorkGroupPolicy(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWorkGroups(args, parsed_globals):
    """CLI handler for the dlc DescribeWorkGroups API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a DescribeWorkGroups
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWorkGroupsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeWorkGroups(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelTask(args, parsed_globals):
    """CLI handler for the dlc CancelTask API.

    Builds a credential from the parsed global options (CVM role, STS
    assume-role, or static secret id/key), issues a CancelTask
    request, and prints the JSON response.  When a waiter is configured,
    the request is re-issued until the waiter expression yields the
    expected value or the waiter timeout elapses.

    :param args: action arguments; serialized to JSON and loaded into the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: when the configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential source priority: CVM role, STS assume-role, plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelTaskRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.CancelTask(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (e.g. python3.3) hand back bytes.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Waiter settings live under the literal string key
        # 'OptionsDefine.WaiterInfo' (not the attribute) — keep as-is.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateScript(args, parsed_globals):
    """Invoke the DLC CreateScript API and print the formatted result.

    args: dict of request parameters, serialized into a CreateScriptRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateScriptRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.CreateScript(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDatabase(args, parsed_globals):
    """Invoke the DLC CreateDatabase API and print the formatted result.

    args: dict of request parameters, serialized into a CreateDatabaseRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDatabaseRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.CreateDatabase(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyWorkGroup(args, parsed_globals):
    """Invoke the DLC ModifyWorkGroup API and print the formatted result.

    args: dict of request parameters, serialized into a ModifyWorkGroupRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyWorkGroupRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.ModifyWorkGroup(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeViews(args, parsed_globals):
    """Invoke the DLC DescribeViews API and print the formatted result.

    args: dict of request parameters, serialized into a DescribeViewsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeViewsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DescribeViews(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTable(args, parsed_globals):
    """Invoke the DLC DescribeTable API and print the formatted result.

    args: dict of request parameters, serialized into a DescribeTableRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTableRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DescribeTable(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTasks(args, parsed_globals):
    """Invoke the DLC DescribeTasks API and print the formatted result.

    args: dict of request parameters, serialized into a DescribeTasksRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTasksRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DescribeTasks(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindWorkGroupsToUser(args, parsed_globals):
    """Invoke the DLC BindWorkGroupsToUser API and print the formatted result.

    args: dict of request parameters, serialized into a BindWorkGroupsToUserRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindWorkGroupsToUserRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.BindWorkGroupsToUser(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyUser(args, parsed_globals):
    """Invoke the DLC ModifyUser API and print the formatted result.

    args: dict of request parameters, serialized into a ModifyUserRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyUserRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.ModifyUser(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteUser(args, parsed_globals):
    """Invoke the DLC DeleteUser API and print the formatted result.

    args: dict of request parameters, serialized into a DeleteUserRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteUserRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DeleteUser(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDatabases(args, parsed_globals):
    """Invoke the DLC DescribeDatabases API and print the formatted result.

    args: dict of request parameters, serialized into a DescribeDatabasesRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDatabasesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DescribeDatabases(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTasksInOrder(args, parsed_globals):
    """Invoke the DLC CreateTasksInOrder API and print the formatted result.

    args: dict of request parameters, serialized into a CreateTasksInOrderRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTasksInOrderRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.CreateTasksInOrder(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAttachUserPolicy(args, parsed_globals):
    """Invoke the DLC AttachUserPolicy API and print the formatted result.

    args: dict of request parameters, serialized into an AttachUserPolicyRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AttachUserPolicyRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.AttachUserPolicy(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeStoreLocation(args, parsed_globals):
    """Invoke the DLC DescribeStoreLocation API and print the formatted result.

    args: dict of request parameters, serialized into a DescribeStoreLocationRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeStoreLocationRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.DescribeStoreLocation(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateUser(args, parsed_globals):
    """Invoke the DLC CreateUser API and print the formatted result.

    args: dict of request parameters, serialized into a CreateUserRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
        output format, optional waiter config, ...).
    Raises ClientError if a configured waiter times out.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret id/key.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds when not supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateUserRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Poll until the waiter expression matches, or break immediately if no waiter.
    while True:
        rsp = client.CreateUser(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:  # result may be bytes (python3.3)
            json_obj = json.loads(result.decode('utf-8'))
        # NOTE(review): waiter info lives under the literal string key
        # 'OptionsDefine.WaiterInfo' (set by parse_global_arg) — confirm before changing.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDetachUserPolicy(args, parsed_globals):
    """Run the DetachUserPolicy action against the DLC service.

    Resolves credentials, builds a versioned client, sends the request
    constructed from *args*, optionally polls until the ``--waiter``
    condition is satisfied, and prints the formatted response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM instance role, then assumed STS role,
    # then the plain SecretId/SecretKey pair.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    req_timeout = 60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout])
    http_profile = HttpProfile(
        reqTimeout=req_timeout,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].DetachUserPolicyRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DetachUserPolicy(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (python3.3) hand back bytes; decode first.
            json_obj = json.loads(result.decode('utf-8'))

        # Waiter info is only present when --waiter was given; the
        # short-circuit below guarantees it is never read otherwise.
        waiter_info = g_param.get('OptionsDefine.WaiterInfo')
        if not g_param[OptionsDefine.Waiter] or search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter_info['expr'], waiter_info['to'],
             search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTables(args, parsed_globals):
    """Run the DescribeTables action against the DLC service.

    Resolves credentials, builds a versioned client, sends the request
    constructed from *args*, optionally polls until the ``--waiter``
    condition is satisfied, and prints the formatted response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Pick a credential source: CVM role beats STS role beats static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )

    req_timeout = 60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout])
    http_profile = HttpProfile(
        reqTimeout=req_timeout,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].DlcClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].DescribeTablesRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeTables(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters (python3.3) hand back bytes; decode first.
            json_obj = json.loads(result.decode('utf-8'))

        # Only populated when --waiter was supplied; guarded by the
        # short-circuit on OptionsDefine.Waiter below.
        waiter_info = g_param.get('OptionsDefine.WaiterInfo')
        if not g_param[OptionsDefine.Waiter] or search(waiter_info['expr'], json_obj) == waiter_info['to']:
            break
        if time.time() - start_time >= waiter_info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter_info['expr'], waiter_info['to'],
             search(waiter_info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter_info['expr'], json_obj))
        time.sleep(waiter_info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps an API version string to the generated DLC client module.
CLIENT_MAP = {
    "v20210125": dlc_client_v20210125,
}
# Maps an API version string to the generated request/response models module.
MODELS_MAP = {
    "v20210125": models_v20210125,
}
# Dispatch table: CLI action name -> handler function implementing it.
ACTION_MAP = {
    "DetachWorkGroupPolicy": doDetachWorkGroupPolicy,
    "CreateTask": doCreateTask,
    "AddUsersToWorkGroup": doAddUsersToWorkGroup,
    "DescribeTaskResult": doDescribeTaskResult,
    "DeleteScript": doDeleteScript,
    "CreateTable": doCreateTable,
    "DescribeUsers": doDescribeUsers,
    "CreateTasks": doCreateTasks,
    "DescribeScripts": doDescribeScripts,
    "DeleteWorkGroup": doDeleteWorkGroup,
    "CreateStoreLocation": doCreateStoreLocation,
    "DeleteUsersFromWorkGroup": doDeleteUsersFromWorkGroup,
    "CreateWorkGroup": doCreateWorkGroup,
    "UnbindWorkGroupsFromUser": doUnbindWorkGroupsFromUser,
    "AttachWorkGroupPolicy": doAttachWorkGroupPolicy,
    "DescribeWorkGroups": doDescribeWorkGroups,
    "CancelTask": doCancelTask,
    "CreateScript": doCreateScript,
    "CreateDatabase": doCreateDatabase,
    "ModifyWorkGroup": doModifyWorkGroup,
    "DescribeViews": doDescribeViews,
    "DescribeTable": doDescribeTable,
    "DescribeTasks": doDescribeTasks,
    "BindWorkGroupsToUser": doBindWorkGroupsToUser,
    "ModifyUser": doModifyUser,
    "DeleteUser": doDeleteUser,
    "DescribeDatabases": doDescribeDatabases,
    "CreateTasksInOrder": doCreateTasksInOrder,
    "AttachUserPolicy": doAttachUserPolicy,
    "DescribeStoreLocation": doDescribeStoreLocation,
    "CreateUser": doCreateUser,
    "DetachUserPolicy": doDetachUserPolicy,
    "DescribeTables": doDescribeTables,
}
# API versions this command group supports; validated in parse_global_arg.
AVAILABLE_VERSION_LIST = [
    "v20210125",
]
def action_caller():
    """Return the action dispatch table used by the CLI driver."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Build the effective option set for one CLI invocation.

    Merges, in priority order: explicit command-line options, environment
    variables (only when no ``--profile`` was given), and the on-disk
    profile files ``~/.tccli/<profile>.configure`` and
    ``~/.tccli/<profile>.credential``.

    :param parsed_globals: dict of globally parsed CLI options; mutated in
        place and also returned.
    :return: the completed option dict, including resolved version and
        endpoint, plus a normalized ``'OptionsDefine.WaiterInfo'`` entry
        when ``--waiter`` is used.
    :raises ConfigurationError: when a profile file is malformed or a
        required option cannot be resolved.
    """
    g_param = parsed_globals

    # When no profile was requested we fall back to "default" and allow
    # environment variables to override the stored credentials below.
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"

    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")

    conf = {}
    cred = {}

    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)

    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))

    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None

    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)

        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)

        if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
            cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
            cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)

    # Fill any option still unset from the profile/credential dicts.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
                    # Secrets are mandatory unless the CVM role flow is used.
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
                # CLI option keys use '_' while stored credential keys use '-'.
                if param.replace('_', '-') in cred:
                    g_param[param] = cred[param.replace('_', '-')]

    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["dlc"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')

        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["dlc"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))

    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))

    if g_param[OptionsDefine.Waiter]:
        # SECURITY NOTE(review): eval() executes arbitrary code supplied via
        # --waiter; ast.literal_eval would be safer but rejects some inputs
        # eval currently accepts, so only flagging rather than replacing.
        param = eval(g_param[OptionsDefine.Waiter])
        if 'expr' not in param:
            raise Exception('`expr` in `--waiter` must be defined')
        if 'to' not in param:
            raise Exception('`to` in `--waiter` must be defined')
        if 'timeout' not in param:
            if 'waiter' in conf and 'timeout' in conf['waiter']:
                param['timeout'] = conf['waiter']['timeout']
            else:
                param['timeout'] = 180
        if 'interval' not in param:
            if 'waiter' in conf and 'interval' in conf['waiter']:
                param['interval'] = conf['waiter']['interval']
            else:
                # BUG FIX: this branch previously assigned
                # param['timeout'] = 5, clobbering the timeout and leaving
                # 'interval' unset so the min() below raised KeyError.
                # The default polling *interval* is 5 seconds.
                param['interval'] = 5
        param['interval'] = min(param['interval'], param['timeout'])
        g_param['OptionsDefine.WaiterInfo'] = param

    # On Python 2, json.load yields unicode values; encode back to utf-8
    # byte strings so downstream code expecting str keeps working.
    if six.PY2:
        for key, value in g_param.items():
            if isinstance(value, six.text_type):
                g_param[key] = value.encode('utf-8')
    return g_param
| 52.155202
| 155
| 0.676982
| 9,934
| 91,741
| 6.028287
| 0.02728
| 0.098389
| 0.296652
| 0.128329
| 0.911814
| 0.907456
| 0.905352
| 0.902112
| 0.898205
| 0.894865
| 0
| 0.004619
| 0.188138
| 91,741
| 1,758
| 156
| 52.184869
| 0.799412
| 0.00448
| 0
| 0.79484
| 0
| 0
| 0.138138
| 0.070882
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021499
| false
| 0
| 0.009828
| 0.000614
| 0.032555
| 0.02027
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ca18dcf23635fdb21456a967266a4ff2e9dd8c73
| 12,930
|
py
|
Python
|
fdk_client/platform/models/ContentValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/ContentValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/ContentValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
"""Class Validators."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
class ContentValidator:
    """Marshmallow request-parameter schemas for the platform Content API.

    Each nested class names one API operation and declares the optional
    parameters that operation accepts; all inherit from ``BaseSchema``.
    These classes are purely declarative — no behavior lives here.
    """

    class getAnnouncementsList(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class createAnnouncement(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAnnouncementById(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        announcement_id = fields.Str(required=False)
    class updateAnnouncement(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        announcement_id = fields.Str(required=False)
    class updateAnnouncementSchedule(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        announcement_id = fields.Str(required=False)
    class deleteAnnouncement(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        announcement_id = fields.Str(required=False)
    class createBlog(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getBlogs(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class updateBlog(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class deleteBlog(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getComponentById(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        slug = fields.Str(required=False)
    class getFaqCategories(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getFaqCategoryBySlugOrId(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id_or_slug = fields.Str(required=False)
    class createFaqCategory(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateFaqCategory(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class deleteFaqCategory(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getFaqsByCategoryIdOrSlug(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id_or_slug = fields.Str(required=False)
    class addFaq(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        category_id = fields.Str(required=False)
    class updateFaq(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        category_id = fields.Str(required=False)
        faq_id = fields.Str(required=False)
    class deleteFaq(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        category_id = fields.Str(required=False)
        faq_id = fields.Str(required=False)
    class getFaqByIdOrSlug(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id_or_slug = fields.Str(required=False)
    class getLandingPages(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class createLandingPage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateLandingPage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class deleteLandingPage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getLegalInformation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateLegalInformation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getNavigations(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        device_platform = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class createNavigation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getDefaultNavigations(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getNavigationBySlug(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        slug = fields.Str(required=False)
        device_platform = fields.Str(required=False)
    class updateNavigation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class deleteNavigation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getPageMeta(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getPageSpec(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class createPagePreview(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updatePagePreview(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        slug = fields.Str(required=False)
    class deletePage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class updatePathRedirectionRules(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getPathRedirectionRules(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getSEOConfiguration(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateSEOConfiguration(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getSlideshows(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        device_platform = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class createSlideshow(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getSlideshowBySlug(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        slug = fields.Str(required=False)
        device_platform = fields.Str(required=False)
    class updateSlideshow(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class deleteSlideshow(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getSupportInformation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateSupportInformation(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateInjectableTag(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class deleteAllInjectableTags(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getInjectableTags(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class addInjectableTag(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class removeInjectableTag(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class editInjectableTag(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        tag_id = fields.Str(required=False)
    class createPage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getPages(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class updatePage(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class getPageBySlug(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        slug = fields.Str(required=False)
| 25.155642
| 52
| 0.57007
| 1,169
| 12,930
| 6.177074
| 0.076989
| 0.29525
| 0.357845
| 0.463094
| 0.84365
| 0.84365
| 0.838942
| 0.838942
| 0.838942
| 0.838942
| 0
| 0
| 0.351585
| 12,930
| 514
| 53
| 25.155642
| 0.861283
| 0.001315
| 0
| 0.714912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.280702
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ca3c008bc598f7cbc3731ed13ea8c4d21c98c31b
| 90
|
py
|
Python
|
eod/plugins/classification/models/__init__.py
|
Helicopt/EOD
|
b5db36f4ce267bf64d093b8174bde2c4097b4718
|
[
"Apache-2.0"
] | 196
|
2021-10-30T05:15:36.000Z
|
2022-03-30T18:43:40.000Z
|
eod/tasks/cls/models/__init__.py
|
YZW-explorer/EOD
|
f10e64de86c0f356ebf5c7e923f4042eec4207b1
|
[
"Apache-2.0"
] | 12
|
2021-10-30T11:33:28.000Z
|
2022-03-31T14:22:58.000Z
|
eod/tasks/cls/models/__init__.py
|
YZW-explorer/EOD
|
f10e64de86c0f356ebf5c7e923f4042eec4207b1
|
[
"Apache-2.0"
] | 23
|
2021-11-01T07:26:17.000Z
|
2022-03-27T05:55:37.000Z
|
from .heads import * # noqa
from .losses import * # noqa
from .postprocess import * # noqa
| 30
| 33
| 0.711111
| 12
| 90
| 5.333333
| 0.5
| 0.46875
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188889
| 90
| 3
| 33
| 30
| 0.876712
| 0.155556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ca3cd771bae91a442ab38172a996ac74ca0a06f6
| 222
|
py
|
Python
|
social/users/admin.py
|
RichardRosario/awesome-pinoy-social
|
c46192b356ead7f1fb4c672239eb8055d415f4f6
|
[
"MIT"
] | 9
|
2020-10-15T01:13:36.000Z
|
2021-05-02T23:15:04.000Z
|
social/users/admin.py
|
RichardRosario/awesome-pinoy-social
|
c46192b356ead7f1fb4c672239eb8055d415f4f6
|
[
"MIT"
] | 26
|
2020-10-21T22:41:10.000Z
|
2021-03-01T15:52:36.000Z
|
social/users/admin.py
|
RichardRosario/awesome-pinoy-social
|
c46192b356ead7f1fb4c672239eb8055d415f4f6
|
[
"MIT"
] | 2
|
2020-10-16T16:40:24.000Z
|
2020-12-02T11:52:53.000Z
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
@admin.register(get_user_model())
class UserAdmin(AuthUserAdmin):
    """Register the active user model with Django's stock ``UserAdmin``.

    No customization yet — inherits all list/search/fieldset behavior
    from ``django.contrib.auth.admin.UserAdmin``.
    """

    pass
| 24.666667
| 64
| 0.824324
| 31
| 222
| 5.774194
| 0.483871
| 0.167598
| 0.284916
| 0.234637
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 222
| 8
| 65
| 27.75
| 0.90404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ca4933266b8b70285a73542db0849f50c298bca9
| 164
|
py
|
Python
|
django_test/hmdb/models.py
|
wolframowy/mgr
|
9d61cef8d135e255f724f57ba55a0dc8c4269219
|
[
"MIT"
] | null | null | null |
django_test/hmdb/models.py
|
wolframowy/mgr
|
9d61cef8d135e255f724f57ba55a0dc8c4269219
|
[
"MIT"
] | null | null | null |
django_test/hmdb/models.py
|
wolframowy/mgr
|
9d61cef8d135e255f724f57ba55a0dc8c4269219
|
[
"MIT"
] | null | null | null |
from hmdb.submodels.spectra import *
from hmdb.submodels.metabolite import *
from hmdb.submodels.metabolite_names import *
from hmdb.submodels.biolocation import *
| 32.8
| 45
| 0.829268
| 21
| 164
| 6.428571
| 0.380952
| 0.237037
| 0.503704
| 0.511111
| 0.488889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 164
| 5
| 46
| 32.8
| 0.912162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ca5de6dfbd9af9a63888f9b8d6f987aeb4a8c697
| 122
|
py
|
Python
|
discord/embeds.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/embeds.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/embeds.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
# Compatibility shim: this module re-exports disnake.embeds so code that
# still imports `discord.embeds` keeps working.
from disnake.embeds import *
from disnake.embeds import __dict__ as __original_dict__
# Copy the full namespace (including underscore-prefixed names the
# star-import above skips) into this module's globals.
locals().update(__original_dict__)
| 24.4
| 56
| 0.836066
| 16
| 122
| 5.5
| 0.5625
| 0.25
| 0.386364
| 0.522727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098361
| 122
| 4
| 57
| 30.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ca6b8802b2319269e56377269a3d2c9d8cfcf5d9
| 9,763
|
py
|
Python
|
tests/components/rfxtrx/test_sensor.py
|
tobycheese/core
|
e5a081c7dd774f188afebfca09c00eb6e2b4f4c9
|
[
"Apache-2.0"
] | 6
|
2020-07-18T16:33:25.000Z
|
2021-09-26T09:52:04.000Z
|
tests/components/rfxtrx/test_sensor.py
|
tobycheese/core
|
e5a081c7dd774f188afebfca09c00eb6e2b4f4c9
|
[
"Apache-2.0"
] | 1
|
2020-10-27T23:58:15.000Z
|
2020-10-27T23:58:15.000Z
|
tests/components/rfxtrx/test_sensor.py
|
tobycheese/core
|
e5a081c7dd774f188afebfca09c00eb6e2b4f4c9
|
[
"Apache-2.0"
] | 3
|
2020-10-18T07:08:40.000Z
|
2021-06-21T02:26:00.000Z
|
"""The tests for the Rfxtrx sensor platform."""
from homeassistant.const import TEMP_CELSIUS, UNIT_PERCENTAGE
from homeassistant.setup import async_setup_component
from . import _signal_event
async def test_default_config(hass, rfxtrx):
    """Test with 0 sensor."""
    config = {"sensor": {"platform": "rfxtrx", "devices": {}}}
    await async_setup_component(hass, "sensor", config)
    await hass.async_block_till_done()

    # No devices configured, so no entities should have been created.
    assert not hass.states.async_all()
async def test_one_sensor(hass, rfxtrx):
    """Test with 1 sensor."""
    devices = {
        "0a52080705020095220269": {
            "name": "Test",
            "data_type": "Temperature",
        }
    }
    await async_setup_component(
        hass,
        "sensor",
        {"sensor": {"platform": "rfxtrx", "devices": devices}},
    )
    await hass.async_block_till_done()

    # The entity exists but reports no value until an event arrives.
    state = hass.states.get("sensor.test_temperature")
    assert state
    assert state.state == "unknown"
    assert state.attributes.get("friendly_name") == "Test Temperature"
    assert state.attributes.get("unit_of_measurement") == TEMP_CELSIUS
async def test_one_sensor_no_datatype(hass, rfxtrx):
    """Test with 1 sensor."""
    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": "rfxtrx",
                "devices": {"0a52080705020095220269": {"name": "Test"}},
            }
        },
    )
    await hass.async_block_till_done()

    base_id = "sensor.test"
    base_name = "Test"

    # Without an explicit data_type, one entity per supported reading is
    # created: (entity suffix, friendly-name label, unit of measurement).
    expectations = [
        ("temperature", "Temperature", TEMP_CELSIUS),
        ("humidity", "Humidity", UNIT_PERCENTAGE),
        ("humidity_status", "Humidity status", ""),
        ("rssi_numeric", "Rssi numeric", "dBm"),
        ("battery_numeric", "Battery numeric", UNIT_PERCENTAGE),
    ]
    for suffix, label, unit in expectations:
        state = hass.states.get(f"{base_id}_{suffix}")
        assert state
        assert state.state == "unknown"
        assert state.attributes.get("friendly_name") == f"{base_name} {label}"
        assert state.attributes.get("unit_of_measurement") == unit
async def test_several_sensors(hass, rfxtrx):
    """Test with 3 sensors."""
    devices = {
        "0a52080705020095220269": {
            "name": "Test",
            "data_type": "Temperature",
        },
        "0a520802060100ff0e0269": {
            "name": "Bath",
            "data_type": ["Temperature", "Humidity"],
        },
    }
    await async_setup_component(
        hass,
        "sensor",
        {"sensor": {"platform": "rfxtrx", "devices": devices}},
    )
    await hass.async_block_till_done()

    # Each configured data_type yields one entity; all start as "unknown".
    expectations = [
        ("sensor.test_temperature", "Test Temperature", TEMP_CELSIUS),
        ("sensor.bath_temperature", "Bath Temperature", TEMP_CELSIUS),
        ("sensor.bath_humidity", "Bath Humidity", UNIT_PERCENTAGE),
    ]
    for entity_id, friendly_name, unit in expectations:
        state = hass.states.get(entity_id)
        assert state
        assert state.state == "unknown"
        assert state.attributes.get("friendly_name") == friendly_name
        assert state.attributes.get("unit_of_measurement") == unit
async def test_discover_sensor(hass, rfxtrx):
    """Test with discovery of sensor."""
    await async_setup_component(
        hass,
        "sensor",
        {"sensor": {"platform": "rfxtrx", "automatic_add": True, "devices": {}}},
    )
    await hass.async_block_till_done()

    def _check_sensors(base_id, expectations):
        # expectations: (entity suffix, state value, unit of measurement)
        for suffix, value, unit in expectations:
            state = hass.states.get(f"{base_id}_{suffix}")
            assert state
            assert state.state == value
            assert state.attributes.get("unit_of_measurement") == unit

    # 1
    await _signal_event(hass, "0a520801070100b81b0279")
    _check_sensors(
        "sensor.0a520801070100b81b0279",
        [
            ("humidity", "27", UNIT_PERCENTAGE),
            ("humidity_status", "normal", ""),
            ("rssi_numeric", "-64", "dBm"),
            ("temperature", "18.4", TEMP_CELSIUS),
            ("battery_numeric", "90", UNIT_PERCENTAGE),
        ],
    )

    # 2
    await _signal_event(hass, "0a52080405020095240279")
    _check_sensors(
        "sensor.0a52080405020095240279",
        [
            ("humidity", "36", UNIT_PERCENTAGE),
            ("humidity_status", "normal", ""),
            ("rssi_numeric", "-64", "dBm"),
            ("temperature", "14.9", TEMP_CELSIUS),
            ("battery_numeric", "90", UNIT_PERCENTAGE),
        ],
    )

    # 1 Update
    await _signal_event(hass, "0a52085e070100b31b0279")
    _check_sensors(
        "sensor.0a520801070100b81b0279",
        [
            ("humidity", "27", UNIT_PERCENTAGE),
            ("humidity_status", "normal", ""),
            ("rssi_numeric", "-64", "dBm"),
            ("temperature", "17.9", TEMP_CELSIUS),
            ("battery_numeric", "90", UNIT_PERCENTAGE),
        ],
    )

    # Two discovered devices x five readings each.
    assert len(hass.states.async_all()) == 10
async def test_discover_sensor_noautoadd(hass, rfxtrx):
    """Test with discover of sensor when auto add is False."""
    await async_setup_component(
        hass,
        "sensor",
        {"sensor": {"platform": "rfxtrx", "automatic_add": False, "devices": {}}},
    )
    await hass.async_block_till_done()

    # With automatic_add disabled, no incoming event may create an entity.
    for event in (
        "0a520801070100b81b0279",
        "0a52080405020095240279",
        "0a52085e070100b31b0279",
    ):
        await _signal_event(hass, event)
        assert not hass.states.async_all()
async def test_update_of_sensors(hass, rfxtrx):
    """Test with 3 sensors."""
    # Two configured devices: "Test" exposes one data type, "Bath" two,
    # so three sensor entities are created in total.
    await async_setup_component(
        hass,
        "sensor",
        {
            "sensor": {
                "platform": "rfxtrx",
                "devices": {
                    "0a52080705020095220269": {
                        "name": "Test",
                        "data_type": "Temperature",
                    },
                    "0a520802060100ff0e0269": {
                        "name": "Bath",
                        "data_type": ["Temperature", "Humidity"],
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()

    # Before any packet arrives every configured sensor reports "unknown".
    state = hass.states.get("sensor.test_temperature")
    assert state
    assert state.state == "unknown"

    state = hass.states.get("sensor.bath_temperature")
    assert state
    assert state.state == "unknown"

    state = hass.states.get("sensor.bath_humidity")
    assert state
    assert state.state == "unknown"

    assert len(hass.states.async_all()) == 3

    # Fire one update packet per device; the payload bytes differ from the
    # configured device ids only in the data fields, so the existing
    # entities are updated in place rather than new ones being added.
    await _signal_event(hass, "0a520802060101ff0f0269")
    await _signal_event(hass, "0a52080705020085220269")

    state = hass.states.get("sensor.test_temperature")
    assert state
    assert state.state == "13.3"

    state = hass.states.get("sensor.bath_temperature")
    assert state
    assert state.state == "51.1"

    state = hass.states.get("sensor.bath_humidity")
    assert state
    assert state.state == "15"

    # No new entities were discovered by the updates.
    assert len(hass.states.async_all()) == 3
| 32.009836
| 82
| 0.629827
| 1,095
| 9,763
| 5.401826
| 0.087671
| 0.17295
| 0.11716
| 0.133897
| 0.916145
| 0.865427
| 0.856298
| 0.831784
| 0.829417
| 0.829417
| 0
| 0.052048
| 0.242344
| 9,763
| 304
| 83
| 32.115132
| 0.7476
| 0.005634
| 0
| 0.741525
| 0
| 0
| 0.254035
| 0.097901
| 0
| 0
| 0
| 0
| 0.423729
| 1
| 0
| false
| 0
| 0.012712
| 0
| 0.012712
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ca81e84d2d6ecf841e50653666299d1be3918efb
| 93
|
py
|
Python
|
pfibs/block_preconditioners/__init__.py
|
iprotasov/pfibs
|
589724369b248971ba76da3f764f4b760b666761
|
[
"BSD-3-Clause"
] | 10
|
2019-02-08T19:37:48.000Z
|
2021-09-16T01:46:15.000Z
|
pfibs/block_preconditioners/__init__.py
|
iprotasov/pfibs
|
589724369b248971ba76da3f764f4b760b666761
|
[
"BSD-3-Clause"
] | 2
|
2019-04-09T17:14:38.000Z
|
2019-04-12T20:57:37.000Z
|
pfibs/block_preconditioners/__init__.py
|
iprotasov/pfibs
|
589724369b248971ba76da3f764f4b760b666761
|
[
"BSD-3-Clause"
] | 3
|
2019-04-09T17:19:09.000Z
|
2021-09-02T19:54:10.000Z
|
from pfibs.block_preconditioners.base import *
from pfibs.block_preconditioners.pcd import *
| 31
| 46
| 0.849462
| 12
| 93
| 6.416667
| 0.583333
| 0.233766
| 0.363636
| 0.753247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086022
| 93
| 2
| 47
| 46.5
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
046454588551a0f3d5351765098463aa03231c71
| 2,281
|
py
|
Python
|
AOC2017_1a.py
|
ghexp/aoc2017
|
0b51d78b8f3fe78958ad36dd9ff73de86d7e90b3
|
[
"Apache-2.0"
] | null | null | null |
AOC2017_1a.py
|
ghexp/aoc2017
|
0b51d78b8f3fe78958ad36dd9ff73de86d7e90b3
|
[
"Apache-2.0"
] | null | null | null |
AOC2017_1a.py
|
ghexp/aoc2017
|
0b51d78b8f3fe78958ad36dd9ff73de86d7e90b3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Advent of Code 2017, day 1, part 1: the "inverse captcha".

Sum every digit that matches the digit immediately following it, treating
the sequence as circular (the successor of the last digit is the first).
"""


def solve_captcha(digits):
    """Return the circular sum of digits in *digits* equal to their successor.

    Starting the index range at -1 compares the last character with the
    first, which implements the circular wrap-around in a single pass.
    An empty string yields 0; a single digit matches itself and is counted.
    """
    total = 0
    for c in range(-1, len(digits) - 1):
        if digits[c] == digits[c + 1]:
            total = total + int(digits[c])
    return total


# Puzzle input: one long digit string (implicit literal concatenation keeps
# the source lines readable while producing a single string).
instr = (
    "73857646862514444739979151237829725363437326575178346717594627954612137824283429318961816955789962743213174192423595347839573729329537743363381184889671727276518624988381953176542897975586834585111269962179533228172293723734558621778444784433918354845915252356518634648911779272449549258277867994365365925613742692994747383212935753858994384465585692412362787797799835879124313954752447965388883732871869216474268662377567373427319767639594991499963155915847161221991832952774398729113713139245944867664794385444174165297434951148198259845244373672252341847726179425259549611369768753251827257547683726845319726144551345235963383553744442735221153622387343831647781293766286214976629654567616317961783535996298876659395218924473612194796464839787983927161197932827177395248973859582737267763181549776755462877898742653396887539771851293349297154863818752862785282476964641622976916981547127755895419452635748972665759964555476255379479279724979793339321151651514627422163273211162913723965856186644757153212981223357892629422845713284145693754643864468248825519188431851958295473739154826875344329427783125427527983134346284982952166926467131372441981232195316935598489158346238259191915326587354221769654517418696667148741584925564459548522991618686514481238258217753632192462445159463926862755455619893555739469247674422534653427539957647919279511587712319441776924695314945596979111766139433962581418222445784574983613523815181665875833422338169893295444156211273979967239973972196764869666847296537635257686553244439911298621291812153399475552572795929212582466462157647366985832116258874361761492513564523582114583434393746883411165297269724346973247345251141922296414642279865828454777417477876735888484397136193268896243269445533867828216335387753719159738999592952329279967422189265143741689475824418927314629934818772777144368875972238718811496932289284424276116646557723334718937359324199378329379534959295148376638839384166443873428258366737337781194815144275124533576283966667915"
    "47531814844176342696362416842993761919369994779897357348334197721735231299249116477"
)
total = solve_captcha(instr)
print(total)
| 228.1
| 2,084
| 0.960982
| 38
| 2,281
| 57.684211
| 0.447368
| 0.016423
| 0.015055
| 0.016423
| 0.011861
| 0
| 0
| 0
| 0
| 0
| 0
| 0.929785
| 0.019728
| 2,281
| 9
| 2,085
| 253.444444
| 0.050537
| 0.029373
| 0
| 0
| 0
| 0
| 0.937613
| 0.937613
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.166667
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
04a05eb2fd74ea1892039dc7b448e25be8c927dc
| 2,428
|
py
|
Python
|
backend/apps/core/session.py
|
ambient-innovation/workshop-django
|
3105f6684097a4443919edc0ac300f64937a1371
|
[
"MIT"
] | null | null | null |
backend/apps/core/session.py
|
ambient-innovation/workshop-django
|
3105f6684097a4443919edc0ac300f64937a1371
|
[
"MIT"
] | null | null | null |
backend/apps/core/session.py
|
ambient-innovation/workshop-django
|
3105f6684097a4443919edc0ac300f64937a1371
|
[
"MIT"
] | null | null | null |
class SessionService:
    """Remembers the user's most recent form selections in the Django session.

    Each public getter/setter pair stores one "last chosen id" under a fixed
    session key so forms can be pre-filled on the next request.
    """

    SESSION_LAST_SET_DUT_COMPANY_ID = 'last_set_dut_company_id'
    SESSION_LAST_SET_DUT_CATEGORY_ID = 'last_set_dut_category_id'
    SESSION_LAST_SET_DUT_LOCATION_ID = 'last_set_dut_location_id'
    SESSION_LAST_SET_TEMPLATE_COMPANY_ID = 'last_set_template_company_id'
    SESSION_LAST_SET_TEMPLATE_INSPECTION_TYPE_ID = 'last_set_template_inspection_type_id'

    @staticmethod
    def _read(request, session_key):
        # Shared getter: the stored id, or None when never set.
        return request.session.get(session_key, None)

    @staticmethod
    def _write(request, session_key, value):
        # Shared setter: store the id and flag the session as modified so the
        # backend persists the change.
        request.session[session_key] = value
        request.session.modified = True

    # --- DUT ---
    # NOTE(review): getters are annotated `-> int` but return None when the
    # key was never set — callers should treat the result as Optional[int].

    @staticmethod
    def get_last_set_dut_company_id(request) -> int:
        """Return the last DUT company id chosen by this user, or None."""
        return SessionService._read(request, SessionService.SESSION_LAST_SET_DUT_COMPANY_ID)

    @staticmethod
    def set_last_set_dut_company_id(request, company_id: int) -> None:
        """Remember *company_id* as the last chosen DUT company."""
        SessionService._write(request, SessionService.SESSION_LAST_SET_DUT_COMPANY_ID, company_id)

    @staticmethod
    def get_last_set_dut_category_id(request) -> int:
        """Return the last DUT category id chosen by this user, or None."""
        return SessionService._read(request, SessionService.SESSION_LAST_SET_DUT_CATEGORY_ID)

    @staticmethod
    def set_last_set_dut_category_id(request, category_id: int) -> None:
        """Remember *category_id* as the last chosen DUT category."""
        SessionService._write(request, SessionService.SESSION_LAST_SET_DUT_CATEGORY_ID, category_id)

    @staticmethod
    def get_last_set_dut_location_id(request) -> int:
        """Return the last DUT location id chosen by this user, or None."""
        return SessionService._read(request, SessionService.SESSION_LAST_SET_DUT_LOCATION_ID)

    @staticmethod
    def set_last_set_dut_location_id(request, location_id: int) -> None:
        """Remember *location_id* as the last chosen DUT location."""
        SessionService._write(request, SessionService.SESSION_LAST_SET_DUT_LOCATION_ID, location_id)

    # --- InspectionTemplate ---

    @staticmethod
    def get_last_set_template_company_id(request) -> int:
        """Return the last template company id chosen by this user, or None."""
        return SessionService._read(request, SessionService.SESSION_LAST_SET_TEMPLATE_COMPANY_ID)

    @staticmethod
    def set_last_set_template_company_id(request, company_id: int) -> None:
        """Remember *company_id* as the last chosen template company."""
        SessionService._write(request, SessionService.SESSION_LAST_SET_TEMPLATE_COMPANY_ID, company_id)

    @staticmethod
    def get_last_set_template_inspection_type_id(request) -> int:
        """Return the last template inspection type id chosen, or None."""
        return SessionService._read(request, SessionService.SESSION_LAST_SET_TEMPLATE_INSPECTION_TYPE_ID)

    @staticmethod
    def set_last_set_template_inspection_type_id(request, inspection_type_id: int) -> None:
        """Remember *inspection_type_id* as the last chosen inspection type."""
        SessionService._write(
            request, SessionService.SESSION_LAST_SET_TEMPLATE_INSPECTION_TYPE_ID, inspection_type_id
        )
| 39.803279
| 105
| 0.772241
| 317
| 2,428
| 5.400631
| 0.072555
| 0.122664
| 0.10514
| 0.179907
| 0.949766
| 0.868575
| 0.735981
| 0.643107
| 0.512266
| 0.512266
| 0
| 0
| 0.158567
| 2,428
| 60
| 106
| 40.466667
| 0.837983
| 0
| 0
| 0.365854
| 0
| 0
| 0.056842
| 0.056842
| 0
| 0
| 0
| 0
| 0
| 1
| 0.243902
| false
| 0
| 0
| 0.121951
| 0.512195
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
04afdf5ffb242915238b709155403affb8f0d4e1
| 170
|
py
|
Python
|
test_file.py
|
morganoh/Python_with_Kirk
|
11bffec8601e553ccebad5b431702627239f72a1
|
[
"Apache-2.0"
] | null | null | null |
test_file.py
|
morganoh/Python_with_Kirk
|
11bffec8601e553ccebad5b431702627239f72a1
|
[
"Apache-2.0"
] | null | null | null |
test_file.py
|
morganoh/Python_with_Kirk
|
11bffec8601e553ccebad5b431702627239f72a1
|
[
"Apache-2.0"
] | null | null | null |
# Demo script. Original used Python-2-only `print` statements; the
# parenthesized single-argument form below prints identically under both
# Python 2 and Python 3.
print("hello world !!")
print("Adding second line for printing !!")
print("Hello everybody from master branch")
for i in range(5):
    print("hello world , it's me !!")
| 18.888889
| 42
| 0.670588
| 26
| 170
| 4.384615
| 0.730769
| 0.263158
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007519
| 0.217647
| 170
| 8
| 43
| 21.25
| 0.849624
| 0
| 0
| 0
| 0
| 0
| 0.623529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.8
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
04feadbcb79ad0a8318d3864dbbd4de5f6becb7e
| 10,149
|
py
|
Python
|
test/hummingbot/connector/exchange/ftx/test_ftx_in_flight_order.py
|
pecuniafinance/hummingbot
|
2cbb19c187a429d3e6000dc938617ca2a1f9f357
|
[
"Apache-2.0"
] | 542
|
2021-12-17T22:34:31.000Z
|
2022-03-31T14:36:23.000Z
|
test/hummingbot/connector/exchange/ftx/test_ftx_in_flight_order.py
|
pecuniafinance/hummingbot
|
2cbb19c187a429d3e6000dc938617ca2a1f9f357
|
[
"Apache-2.0"
] | 291
|
2021-12-17T20:07:53.000Z
|
2022-03-31T11:07:23.000Z
|
test/hummingbot/connector/exchange/ftx/test_ftx_in_flight_order.py
|
pecuniafinance/hummingbot
|
2cbb19c187a429d3e6000dc938617ca2a1f9f357
|
[
"Apache-2.0"
] | 220
|
2021-12-17T12:41:23.000Z
|
2022-03-31T23:03:22.000Z
|
from datetime import datetime
from decimal import Decimal
from unittest import TestCase
from hummingbot.connector.exchange.ftx.ftx_in_flight_order import FtxInFlightOrder
from hummingbot.core.data_type.common import OrderType, TradeType
class FtxInFlightOrderTests(TestCase):
    """Tests for FtxInFlightOrder JSON round-tripping and trade-update logic."""

    def setUp(self):
        super().setUp()
        self.base_token = "BTC"
        self.quote_token = "USDT"
        self.trading_pair = f"{self.base_token}-{self.quote_token}"

    def _create_order(self, order_type=OrderType.LIMIT):
        # Helper: the BUY order used by most tests; only the order type varies.
        return FtxInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="38065410",
            trading_pair=self.trading_pair,
            order_type=order_type,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1),
            creation_timestamp=datetime.now().timestamp()
        )

    def _trade_event(self, trade_id, price, size, fee):
        # Helper: an FTX fill payload; only the fields the tests vary are
        # parameterized, everything else matches the original fixtures.
        return {
            "fee": fee,
            "feeRate": 0.0014,
            "feeCurrency": "ETH",
            "future": None,
            "id": 7828307,
            "liquidity": "taker",
            "market": "BTC-USDT",
            "orderId": 38065410,
            "tradeId": trade_id,
            "price": price,
            "side": "buy",
            "size": size,
            "time": "2019-05-07T16:40:58.358438+00:00",
            "type": "order"
        }

    def test_creation_from_json(self):
        # Round trip: from_json must restore every field and to_json must
        # reproduce the input dict exactly.
        order_info = {
            "client_order_id": "OID1",
            "exchange_order_id": "EOID1",
            "trading_pair": self.trading_pair,
            "order_type": OrderType.LIMIT.name,
            "trade_type": TradeType.BUY.name,
            "price": "1000",
            "amount": "1",
            "creation_timestamp": 1640001112.0,
            "executed_amount_base": "0.5",
            "executed_amount_quote": "500",
            "fee_asset": "USDT",
            "fee_paid": "5",
            "last_state": "closed",
        }

        order = FtxInFlightOrder.from_json(order_info)

        self.assertEqual(order_info["client_order_id"], order.client_order_id)
        self.assertEqual(order_info["exchange_order_id"], order.exchange_order_id)
        self.assertEqual(order_info["trading_pair"], order.trading_pair)
        self.assertEqual(OrderType.LIMIT, order.order_type)
        self.assertEqual(TradeType.BUY, order.trade_type)
        self.assertEqual(Decimal(order_info["price"]), order.price)
        self.assertEqual(Decimal(order_info["amount"]), order.amount)
        self.assertEqual(1640001112.0, order.creation_timestamp)
        self.assertEqual(order_info["last_state"], order.last_state)
        self.assertEqual(Decimal(order_info["executed_amount_base"]), order.executed_amount_base)
        self.assertEqual(Decimal(order_info["executed_amount_quote"]), order.executed_amount_quote)
        self.assertEqual(Decimal(order_info["fee_paid"]), order.fee_paid)
        self.assertEqual(order_info["fee_asset"], order.fee_asset)
        self.assertEqual(order_info, order.to_json())

    def test_fee_asset_is_based_on_order_type(self):
        # Maker-style orders pay fees in the base asset; market orders in the
        # quote asset.
        order = self._create_order(order_type=OrderType.LIMIT)
        self.assertEqual(order.base_asset, order.fee_asset)

        order = self._create_order(order_type=OrderType.LIMIT_MAKER)
        self.assertEqual(order.base_asset, order.fee_asset)

        order = self._create_order(order_type=OrderType.MARKET)
        self.assertEqual(order.quote_asset, order.fee_asset)

    def test_update_with_partial_trade_event(self):
        order = self._create_order()
        trade_event_info = self._trade_event(trade_id=1, price=10050.0, size=0.1, fee=10.0)

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(
            str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["fee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"], order.fee_asset)

    def test_update_with_full_fill_trade_event(self):
        order = self._create_order()
        trade_event_info = self._trade_event(trade_id=19129310, price=10050.0, size=0.1, fee=10.0)

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(
            str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["fee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"], order.fee_asset)

        # Second fill for the remaining 0.9 completes the 1.0 order amount.
        complete_event_info = self._trade_event(trade_id=2, price=10060.0, size=0.9, fee=50.0)

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual(order.amount, order.executed_amount_base)
        expected_executed_quote_amount += Decimal(str(complete_event_info["size"])) * Decimal(
            str(complete_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["fee"]) + Decimal(complete_event_info["fee"]),
                         order.fee_paid)
        self.assertEqual(complete_event_info["feeCurrency"], order.fee_asset)

    def test_update_with_repeated_trade_id_is_ignored(self):
        order = self._create_order()
        trade_event_info = self._trade_event(trade_id=1, price=10050.0, size=0.1, fee=10.0)

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(
            str(trade_event_info["price"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["fee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"], order.fee_asset)

        # Same tradeId as the first fill: the update must be rejected and all
        # accumulated amounts must stay unchanged.
        complete_event_info = self._trade_event(trade_id=1, price=10060.0, size=0.9, fee=50.0)

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertFalse(update_result)
        self.assertFalse(order.is_done)
        self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base)
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["fee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"], order.fee_asset)
| 38.298113
| 99
| 0.602621
| 1,094
| 10,149
| 5.308044
| 0.105119
| 0.095574
| 0.060272
| 0.034441
| 0.814534
| 0.785604
| 0.770966
| 0.736869
| 0.730153
| 0.721715
| 0
| 0.056177
| 0.277367
| 10,149
| 264
| 100
| 38.443182
| 0.735615
| 0
| 0
| 0.703863
| 0
| 0
| 0.121194
| 0.023451
| 0
| 0
| 0
| 0
| 0.201717
| 1
| 0.025751
| false
| 0
| 0.021459
| 0
| 0.051502
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f3d9da6e5cee117bc681adf890f5dd2a99344e2d
| 5,399
|
py
|
Python
|
matlab/src/sumpdf.py
|
DPBayes/robust-private-lr
|
ec36e51a52ea303055bbb72c129d12b4d21dcd8a
|
[
"MIT"
] | 1
|
2018-06-08T08:48:46.000Z
|
2018-06-08T08:48:46.000Z
|
matlab/src/sumpdf.py
|
DPBayes/robust-private-lr
|
ec36e51a52ea303055bbb72c129d12b4d21dcd8a
|
[
"MIT"
] | null | null | null |
matlab/src/sumpdf.py
|
DPBayes/robust-private-lr
|
ec36e51a52ea303055bbb72c129d12b4d21dcd8a
|
[
"MIT"
] | null | null | null |
import autograd.numpy as np
from autograd import grad, value_and_grad
import numpy as np_orig
import numpy.random as npr
import scipy.stats as sps
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from scipy.special import erfc
# > y := integrate(1/sqrt(2*Pi*a^2) * exp(\
# > -(t-x)^2/(2*a^2)) * exp(-t/b) / (2*b), t=-infinity..0);
# /
# |
# y := lim -1/4 csgn(a) exp(- x/b) |
# t -> (-infinity) |
# \
# 2 1/2 2
# 2 a - 2 b x 2 (a + b t - b x)
# csgn(a) exp(x/b) exp(----------) erf(---------------------)
# 2 2 a b
# 2 b
# 1/2 2 2 \
# 2 (a - b x) a |
# - erf(---------------) exp(----)|/b
# 2 a b 2 |
# 2 b /
# > z := integrate(1/sqrt(2*Pi*a^2) * exp(\
# > -(t-x)^2/(2*a^2)) * exp(t/b) / (2*b), t=0..infinity);
# 2
# a + 2 b x
# z := lim -1/4 csgn(a) exp(----------)
# t -> infinity 2
# 2 b
# / 1/2 2 1/2 2 \
# | 2 (a - b t + b x) 2 2 (a + b x) |
# |erf(---------------------) csgn(a) - erf(---------------)|/b
# \ 2 a b 2 a b /
# > y + z assuming a > 0, b > 0;
# 1/2 2 2 2
# 2 (a - b x) a a
# erf(---------------) exp(----) + exp(----)
# 2 a b 2 2
# 2 b 2 b
# 1/4 ------------------------------------------
# b exp(x/b)
# 2 2 1/2 2
# a a 2 (a + b x)
# exp(x/b) exp(----) + exp(----) exp(x/b) erf(---------------)
# 2 2 2 a b
# 2 b 2 b
# + 1/4 ------------------------------------------------------------
# b
# > simplify(y + z assuming a > 0, b > 0);
# 2
# a - 2 b x
# 1/4 exp(----------)
# 2
# 2 b
# / 1/2 2 1/2 2 \
# | 2 x 2 (a + b x) 2 x 2 (a - b x) |
# |exp(---) erf(---------------) + exp(---) + erf(---------------) + 1|/b
# \ b 2 a b b 2 a b /
# > y := integrate(1/sqrt(2*Pi*a^2) * exp(\
# > -(t-x)^2/(2*a^2)) * exp(t/b) / (2*b), t=-infinity..0);
# 2
# a + 2 b x
# y := lim -1/4 csgn(a) exp(----------)
# t -> (-infinity) 2
# 2 b
# / 1/2 2 1/2 2 \
# | 2 (a - b t + b x) 2 2 (a + b x) |
# |-erf(---------------------) csgn(a) + erf(---------------)|/b
# \ 2 a b 2 a b /
# > z := integrate(1/sqrt(2*Pi*a^2) * exp(\
# > -(t-x)^2/(2*a^2)) * exp(-t/b) / (2*b), t=0..infinity);
# memory used=308.9MB, alloc=149.4MB, time=4.95
# /
# |
# z := lim 1/4 csgn(a) exp(- x/b) |
# t -> infinity |
# \
# 2 1/2 2
# 2 a - 2 b x 2 (a + b t - b x)
# csgn(a) exp(x/b) exp(----------) erf(---------------------)
# 2 2 a b
# 2 b
# 1/2 2 2 \
# 2 (a - b x) a |
# - erf(---------------) exp(----)|/b
# 2 a b 2 |
# 2 b /
# > q := simplify(y + z assuming a > 0, b > 0);
# 2
# a - 2 b x
# q := -1/4 exp(----------)
# 2
# 2 b
# / 1/2 2 1/2 2 \
# | 2 x 2 (a + b x) 2 x 2 (a - b x) |
# |exp(---) erf(---------------) - exp(---) + erf(---------------) - 1|/b
# \ b 2 a b b 2 a b /
def lsumpdf(x, a, b):
    """Log-density at *x* of the sum of a zero-mean Gaussian (std ``a``) and a
    zero-mean Laplace variable (scale ``b``).

    Implements the closed form derived in the Maple transcript above:
    a shared prefactor times a pair of complementary-error-function tails.
    """
    scale = np.sqrt(2) * a * b
    log_prefactor = -np.log(4 * b)
    exponent = (a**2 - 2 * b * x) / (2 * b**2)
    upper_tail = erfc((a**2 + b * x) / scale)
    lower_tail = erfc((a**2 - b * x) / scale)
    return log_prefactor + exponent + np.log(np.exp(2 * x / b) * upper_tail + lower_tail)
| 42.179688
| 81
| 0.19448
| 565
| 5,399
| 1.853097
| 0.099115
| 0.08978
| 0.08596
| 0.038204
| 0.696275
| 0.672397
| 0.663801
| 0.633238
| 0.625597
| 0.625597
| 0
| 0.0931
| 0.608076
| 5,399
| 127
| 82
| 42.511811
| 0.401701
| 0.87331
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.615385
| 0.076923
| 0.769231
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6d4706eba5754341facdfe101a19ddb2a09b15d8
| 32,658
|
py
|
Python
|
image/image.py
|
Onii-Chan-Discord/onii-cogs
|
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
|
[
"Apache-2.0"
] | 1
|
2021-07-11T18:46:08.000Z
|
2021-07-11T18:46:08.000Z
|
image/image.py
|
Onii-Chan-Discord/onii-cogs
|
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
|
[
"Apache-2.0"
] | 35
|
2021-06-05T06:33:34.000Z
|
2022-03-22T01:44:00.000Z
|
image/image.py
|
Onii-Chan-Discord/onii-cogs
|
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
|
[
"Apache-2.0"
] | 4
|
2021-07-07T04:29:33.000Z
|
2021-12-31T12:12:00.000Z
|
"""
Copyright 2021 Onii-chan
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import asyncio
import logging
import random
import aiohttp
import discord
from redbot.core import Config, commands
async def api_call(call_uri, returnObj=False):
    """Fetch JSON from ``call_uri``.

    Returns the decoded JSON object when ``returnObj`` is truthy, otherwise
    just its ``"url"`` field.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{call_uri}") as response:
            payload = await response.json()
    # The `async with` blocks close the response and session; the original
    # trailing `await session.close()` was unreachable dead code.
    # Truthiness test instead of `== False` / `== True`: the old chain
    # silently returned None for any truthy value other than the literal True.
    if returnObj:
        return payload
    return payload["url"]
# Module-level logger for this cog, namespaced under Red's logger hierarchy.
log = logging.getLogger("red.onii.image")
class Image(commands.Cog):
"""Get tons of memes or other images"""
__author__ = ["Onii-chan"]
__version__ = "3.3.0"
def format_help_for_context(self, ctx: commands.Context) -> str:
"""Thanks Sinbad!"""
pre_processed = super().format_help_for_context(ctx)
return f"{pre_processed}\n\nAuthors: {', '.join(self.__author__)}\nCog Version: {self.__version__}"
async def red_get_data_for_user(self, *, user_id: int):
"""
This cog does not story any end user data.
"""
return {}
async def red_delete_data_for_user(self, **kwargs):
"""
Nothing to delete.
"""
return
    def __init__(self, bot):
        # Keep a reference to the running bot instance; the commands below
        # read self.bot.user for embed author fields.
        self.bot = bot
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def naruto(self, ctx: commands.Context):
"""Shows some naruto wallpapers from reddit.
Wallpapers shown are taken from r/narutowallpapers.
"""
await ctx.trigger_typing()
async with aiohttp.ClientSession() as session:
async with session.get(
"https://api.martinebot.com/v1/images/subreddit?name=narutowallpapers"
) as resp:
origin = await resp.json()
data = origin["data"]
url = data["image_url"]
subreddit = data["subreddit"] or ""
sub_name = subreddit["name"] or "Unknown"
sub_url = subreddit["url"] or ""
author = data["author"] or ""
r_author = author["name"] or "Unknown"
r_author_url = author["url"] or ""
title = data["title"] or ""
created_at = data["created_at"] or ""
downvotes = data["downvotes"] or ""
comments = data["comments"] or ""
ups = data["upvotes"] or ""
link = data["post_url"] or ""
if data["nsfw"] and not ctx.channel.is_nsfw():
return await ctx.send(
"Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please it on and try again later."
)
embed = discord.Embed(
title="Here's a random image...:frame_photo:",
colour=discord.Colour.random(),
description=(
"**Post by:** [u/{}]({})\n"
"**From:** [r/{}]({})\n"
"**This post was created on:** <t:{}:F>\n"
"**Title:** [{}]({})"
).format(
r_author,
r_author_url,
sub_name,
sub_url,
created_at,
title,
link,
),
)
embed.set_image(url=url)
embed.set_footer(
text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(
ups,
downvotes,
comments,
),
icon_url=ctx.message.author.avatar_url,
)
try:
await ctx.reply(
embed=embed,
mention_author=False,
)
except discord.HTTPException:
await ctx.send("Something went wrong while posting an image.")
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def subr(self, ctx: commands.Context, reddit: str):
"""Shows some images form the specified subreddit.
Warning: Some Images Could Be Considered Nsfw In Some Servers.
"""
await ctx.trigger_typing()
async with aiohttp.ClientSession() as session:
async with session.get(
f"https://api.martinebot.com/v1/images/subreddit?name={reddit}"
) as resp:
origin = await resp.json()
if not origin["success"]:
embed = discord.Embed(
title="That subreddit doesn't seem to exist...",
colour=discord.Colour.random(),
description=(
"**I did my best to find '{}', but my search yielded no results.**\n"
"**Please check for any mistakes in the name and try again.**"
).format(reddit),
)
return await ctx.reply(embed=embed, mention_author=False)
data = origin["data"]
image_url = data["image_url"]
subreddit = data["subreddit"] or ""
sub_name = subreddit["name"] or "Unknown"
sub_url = subreddit["url"] or ""
author = data["author"] or ""
r_author = author["name"] or "Unknown"
r_author_url = author["url"] or ""
title = data["title"] or ""
created_at = data["created_at"] or ""
downvotes = data["downvotes"] or ""
comments = data["comments"] or ""
ups = data["upvotes"] or ""
link = data["post_url"] or ""
if data["nsfw"] and not ctx.channel.is_nsfw():
return await ctx.send(
"Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please it on and try again later."
)
embed = discord.Embed(
title="Here's a random image...:frame_photo:",
colour=discord.Colour.random(),
description=(
"**Post by:** [u/{}]({})\n"
"**From:** [r/{}]({})\n"
"**This post was created on:** <t:{}:F>\n"
"**Title:** [{}]({})"
).format(
r_author,
r_author_url,
sub_name,
sub_url,
created_at,
title,
link,
),
)
embed.set_image(url=image_url)
embed.set_footer(
text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(
ups,
downvotes,
comments,
),
icon_url=ctx.message.author.avatar_url,
)
return await ctx.reply(embed=embed, mention_author=False)
@commands.command(name="randomwallpaper", aliases=["raw"])
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def wallpaper_random(self, ctx: commands.Context):
"""Shows some anime wallpaper from reddit.
Wallpapers shown are taken from random subreddits.
Warning: Some Images Could Be Considered Nsfw In Some Servers.
"""
await ctx.trigger_typing()
SUBREDDITS = ["images/subreddits?name=Animewallpaper", "images/wallpaper"]
API = random.choice(SUBREDDITS)
async with aiohttp.ClientSession() as session:
async with session.get(f"https://api.martinebot.com/v1/{API}") as resp:
origin = await resp.json()
data = origin["data"]
url = data["image_url"]
subreddit = data["subreddit"] or ""
sub_name = subreddit["name"] or "Unknown"
sub_url = subreddit["url"] or ""
author = data["author"] or ""
r_author = author["name"] or "Unknown"
r_author_url = author["url"] or ""
title = data["title"] or ""
created_at = data["created_at"] or ""
downvotes = data["downvotes"] or ""
comments = data["comments"] or ""
ups = data["upvotes"] or ""
link = data["post_url"] or ""
if data["nsfw"] and not ctx.channel.is_nsfw():
return await ctx.send(
"Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please it on and try again later."
)
embed = discord.Embed(
title="Here's a random image...:frame_photo:",
colour=discord.Colour.random(),
description=(
"**Post by:** [u/{}]({})\n"
"**From:** [r/{}]({})\n"
"**This post was created on:** <t:{}:F>\n"
"**Title:** [{}]({})"
).format(
r_author,
r_author_url,
sub_name,
sub_url,
created_at,
title,
link,
),
)
embed.set_image(url=url)
embed.set_footer(
text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(
ups,
downvotes,
comments,
),
icon_url=ctx.message.author.avatar_url,
)
await ctx.reply(
embed=embed,
mention_author=False,
)
@commands.command(name="randomavatar", aliases=["rav"])
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def avatar_random(self, ctx: commands.Context):
    """Shows some anime profile pictures from reddit.

    Pictures shown are taken from r/AnimePFP.
    """
    await ctx.trigger_typing()
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://api.martinebot.com/v1/images/subreddit?name=AnimePFP"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command()
@commands.guild_only()
async def neko(self, ctx):
    """Sends a random neko image from nekos.best in an embed."""
    neko_embed = discord.Embed(
        title="Neko's For You!",
        color=discord.Colour.random(),
        timestamp=ctx.message.created_at,
    )
    neko_embed.set_footer(
        text="Powered by nekos.best",
        icon_url=ctx.message.author.avatar_url,
    )
    neko_embed.set_author(
        name=self.bot.user.display_name,
        icon_url=self.bot.user.avatar_url,
    )
    image_url = await api_call("https://nekos.best/nekos")
    neko_embed.set_image(url=image_url)
    await ctx.reply(embed=neko_embed, mention_author=False)
@commands.command(aliases=["memes"])
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def meme(self, ctx: commands.Context):
    """Shows some memes from reddit.

    Memes shown are taken from the subreddit set by the admins.
    """
    await ctx.trigger_typing()
    async with aiohttp.ClientSession() as session:
        async with session.get("https://api.martinebot.com/v1/images/memes") as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content."
            " Please turn nsfw on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def space(self, ctx: commands.Context):
    """Shows some space images from reddit.

    Images shown are taken from r/spaceengine and r/LandscapeAstro.
    """
    await ctx.trigger_typing()
    SUBREDDITS = ["spaceengine", "LandscapeAstro"]
    API = random.choice(SUBREDDITS)
    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"https://api.martinebot.com/v1/images/subreddit?name={API}"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def moe(self, ctx: commands.Context):
    """Shows some moe images from reddit.

    Images shown are taken from:
    r/awwnime, r/animeboys, r/cuteanimeboys and r/CuteAnimeGirls.
    """
    await ctx.trigger_typing()
    # "CuteAnimeGirlss" (double "s") was a typo for the r/CuteAnimeGirls
    # subreddit the docstring advertises.
    SUBREDDITS = ["animeboys", "CuteAnimeGirls", "cuteanimeboys", "awwnime"]
    API = random.choice(SUBREDDITS)
    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"https://api.martinebot.com/v1/images/subreddit?name={API}"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    try:
        await ctx.reply(embed=embed, mention_author=False)
    except discord.HTTPException:
        # Best-effort fallback when Discord rejects the embed/reply.
        await ctx.send("Something went wrong while posting an image.")
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def scenery(self, ctx: commands.Context):
    """Shows some scenery from reddit.

    Images shown are taken from r/EarthPorn.
    """
    await ctx.trigger_typing()
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://api.martinebot.com/v1/images/subreddit?name=EarthPorn"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def unix(self, ctx: commands.Context):
    """Shows some unix images from reddit.

    Images shown are taken from r/UnixPorn.
    """
    await ctx.trigger_typing()
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://api.martinebot.com/v1/images/subreddit?name=UnixPorn"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    if data["nsfw"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command(aliases=["celeb"])
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def celebrity(self, ctx: commands.Context):
    """Shows some images of celebrities from reddit.

    Images shown are taken from:
    r/UltraHighResCelebs, r/HighResCelebs and r/UHQcelebs.
    """
    await ctx.trigger_typing()
    if not ctx.channel.is_nsfw():
        # The whole command is gated to NSFW-marked channels.
        return await ctx.send("Sorry but this is nsfw")
    # NOTE(review): "UltraHighResCeleb" was missing the trailing "s" that the
    # docstring advertises -- fixed to match r/UltraHighResCelebs.
    SUBREDDITS = [
        "UltraHighResCelebs",
        "HighResCelebs",
        "UHQcelebs",
    ]
    API = random.choice(SUBREDDITS)
    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"https://api.martinebot.com/v1/images/subreddit?name={API}"
        ) as resp:
            origin = await resp.json()
    data = origin["data"]
    url = data["image_url"]
    # The API may return null for the nested subreddit/author objects; fall
    # back to empty dicts so the .get() lookups below cannot raise TypeError
    # (the previous `or ""` fallback would crash on `""["name"]`).
    subreddit = data["subreddit"] or {}
    sub_name = subreddit.get("name") or "Unknown"
    sub_url = subreddit.get("url") or ""
    author = data["author"] or {}
    r_author = author.get("name") or "Unknown"
    r_author_url = author.get("url") or ""
    title = data["title"] or ""
    created_at = data["created_at"] or ""
    downvotes = data["downvotes"] or ""
    comments = data["comments"] or ""
    ups = data["upvotes"] or ""
    link = data["post_url"] or ""
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        text="👍 {} • 👎 {} • 💬 {} • martinebot.com API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    await ctx.reply(embed=embed, mention_author=False)
@commands.command()
@commands.guild_only()
@commands.cooldown(1, 5, commands.BucketType.user)
async def test(self, ctx: commands.Context):
    """Shows some moe images from reddit.

    Images shown are taken from:
    r/awwnime, r/animeboys, r/cuteanimeboys and r/CuteAnimeGirls.
    """
    await ctx.trigger_typing()
    # "CuteAnimeGirlss" (double "s") was a typo for r/CuteAnimeGirls.
    SUBREDDITS = ["animeboys", "CuteAnimeGirls", "cuteanimeboys", "awwnime"]
    API = random.choice(SUBREDDITS)
    async with aiohttp.ClientSession() as session:
        # Query reddit's public listing endpoint directly; the canonical
        # /r/<name>/ path avoids a redirect from the bare /<name>/ form.
        async with session.get(
            f"https://www.reddit.com/r/{API}/new.json?sort=new"
        ) as resp:
            payload = await resp.json()
    children = payload["data"]["children"]
    post = random.choice(children)["data"]
    title = post["title"] or ""
    url = post["url_overridden_by_dest"] or ""
    # f-strings are always truthy, so the previous `f"..." or ""` fallbacks
    # were dead code and have been dropped.
    link = f'https://reddit.com{post["permalink"]}'
    ups = post["ups"] or ""
    comments = post["num_comments"] or ""
    subreddit = post["subreddit_name_prefixed"] or ""  # e.g. "r/awwnime"
    sub_name = post["subreddit"] or "Unknown"
    sub_url = f"https://reddit.com/{subreddit}/"
    author = post["author"] or ""
    r_author = author or "Unknown"
    r_author_url = f"https://reddit.com/u/{author}"
    created_at = post["created_utc"] or ""
    downvotes = post["downs"] or ""
    if post["over_18"] and not ctx.channel.is_nsfw():
        # Never post NSFW material into a channel that is not marked NSFW.
        return await ctx.send(
            "Sorry the contents of this post are NSFW and this channel isn't set to allow NSFW content, please turn it on and try again later."
        )
    embed = discord.Embed(
        title="Here's a random image...:frame_photo:",
        colour=discord.Colour.random(),
        description=(
            "**Post by:** [u/{}]({})\n"
            "**From:** [r/{}]({})\n"
            "**This post was created on:** <t:{}:F>\n"
            "**Title:** [{}]({})"
        ).format(r_author, r_author_url, sub_name, sub_url, created_at, title, link),
    )
    embed.set_image(url=url)
    embed.set_footer(
        # Data comes straight from reddit here, so credit reddit (the old
        # footer wrongly credited the martinebot.com API).
        text="👍 {} • 👎 {} • 💬 {} • reddit API".format(ups, downvotes, comments),
        icon_url=ctx.message.author.avatar_url,
    )
    try:
        await ctx.reply(embed=embed, mention_author=False)
    except discord.HTTPException:
        # Best-effort fallback when Discord rejects the embed/reply.
        await ctx.send("Something went wrong while posting an image.")
| 37.537931
| 150
| 0.474861
| 3,338
| 32,658
| 4.565309
| 0.087777
| 0.02113
| 0.015093
| 0.024739
| 0.821642
| 0.81193
| 0.795459
| 0.788503
| 0.778529
| 0.769801
| 0
| 0.002457
| 0.401831
| 32,658
| 869
| 151
| 37.581128
| 0.774251
| 0.046206
| 0
| 0.807799
| 0
| 0.013928
| 0.201584
| 0.012487
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002786
| false
| 0
| 0.008357
| 0
| 0.04039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d5af1ebb355e954310d434c8c698e851419c57a
| 67,888
|
py
|
Python
|
sirius_si.py
|
lnls-fac/sirius_wiki
|
1995cffa149a55cd8f0184cf8a8f04492cfc347c
|
[
"MIT"
] | 2
|
2015-04-13T23:20:24.000Z
|
2015-04-13T23:47:18.000Z
|
sirius_si.py
|
lnls-fac/sirius_wiki
|
1995cffa149a55cd8f0184cf8a8f04492cfc347c
|
[
"MIT"
] | 1
|
2015-04-14T04:20:36.000Z
|
2015-04-14T04:20:52.000Z
|
sirius_si.py
|
lnls-fac/sirius_wiki
|
1995cffa149a55cd8f0184cf8a8f04492cfc347c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from parameter import Parameter
from definitions import ParameterDefinitions as Prms
'''Storage ring parameters
======================='''

# --- lattice geometry ---
si_lattice_version = 'V03'
si_lattice_type = '5BA'
si_lattice_circumference = 518.396                   # [m]
si_lattice_symmetry = 10                             # supercell count (N_SUPERCELLS)
si_lattice_long_straight_section_length = 7.0        # [m]
si_lattice_short_straight_section_length = 6.0       # [m]

# --- stored beam ---
si_beam_energy = 3.0                                 # [GeV]
si_beam_current = 350.0                              # [mA]

# --- rf system ---
si_rf_harmonic_number = 864
si_rf_peak_voltage = 2.7                             # [MV]
# Hardedge model of the ring dipoles: deflection angle and effective
# magnetic length for each dipole type (B1, B2, B3, BC), listed
# angle-first for every magnet.
si_magnet_dipole_b1_deflection_angle = 2.76654       # [deg]
si_magnet_dipole_b1_hardedge_length = 0.828080       # [m]
si_magnet_dipole_b2_deflection_angle = 4.10351       # [deg]
si_magnet_dipole_b2_hardedge_length = 1.228262       # [m]
si_magnet_dipole_b3_deflection_angle = 1.42995       # [deg]
si_magnet_dipole_b3_hardedge_length = 0.428011       # [m]
si_magnet_dipole_bc_deflection_angle = 1.40000       # [deg]
si_magnet_dipole_bc_hardedge_length = 0.125394       # [m]
# Sextupole families. All fourteen families (sda, sfa, sdb, sfb,
# sd1-sd6, sf1-sf4) share the same magnet count, hardedge length and
# maximum integrated sextupolar field, so each attribute is bound to
# every family name in one chained assignment.
si_magnet_sextupole_sda_number = \
    si_magnet_sextupole_sfa_number = \
    si_magnet_sextupole_sdb_number = \
    si_magnet_sextupole_sfb_number = \
    si_magnet_sextupole_sd1_number = \
    si_magnet_sextupole_sd2_number = \
    si_magnet_sextupole_sd3_number = \
    si_magnet_sextupole_sd4_number = \
    si_magnet_sextupole_sd5_number = \
    si_magnet_sextupole_sd6_number = \
    si_magnet_sextupole_sf1_number = \
    si_magnet_sextupole_sf2_number = \
    si_magnet_sextupole_sf3_number = \
    si_magnet_sextupole_sf4_number = 20
si_magnet_sextupole_sda_hardedge_length = \
    si_magnet_sextupole_sfa_hardedge_length = \
    si_magnet_sextupole_sdb_hardedge_length = \
    si_magnet_sextupole_sfb_hardedge_length = \
    si_magnet_sextupole_sd1_hardedge_length = \
    si_magnet_sextupole_sd2_hardedge_length = \
    si_magnet_sextupole_sd3_hardedge_length = \
    si_magnet_sextupole_sd4_hardedge_length = \
    si_magnet_sextupole_sd5_hardedge_length = \
    si_magnet_sextupole_sd6_hardedge_length = \
    si_magnet_sextupole_sf1_hardedge_length = \
    si_magnet_sextupole_sf2_hardedge_length = \
    si_magnet_sextupole_sf3_hardedge_length = \
    si_magnet_sextupole_sf4_hardedge_length = 0.15   # [m]
si_magnet_sextupole_sda_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sfa_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sdb_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sfb_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd1_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd2_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd3_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd4_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd5_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sd6_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sf1_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sf2_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sf3_integrated_sextupolar_field_maximum = \
    si_magnet_sextupole_sf4_integrated_sextupolar_field_maximum = 360   # [T/m]
''' correction system '''
# Beam position monitors plus corrector magnets; each corrector family
# is listed with its count followed by its per-magnet maximum strength.
# NOTE(review): ch*/cv* presumably stand for horizontal/vertical
# correctors, with the s/f suffix for slow/fast, and qs for skew
# quadrupoles — naming not confirmed from this file alone.
si_bpm_number = 180
si_magnet_chs_number = 160
si_magnet_chs_maximum_strength = 250                 # [urad]
si_magnet_cvs_number = 120
si_magnet_cvs_maximum_strength = 250                 # [urad]
si_magnet_chf_number = 80
si_magnet_chf_maximum_strength = 25                  # [urad]
si_magnet_cvf_number = 80
si_magnet_cvf_maximum_strength = 25                  # [urad]
si_magnet_qs_number = 80
si_magnet_qs_maximum_strength = 0.003                # [1/m]
# Linear-optics figures for the default operation mode: tunes,
# chromaticities, and equilibrium beam sizes/divergences at the
# reference source points. Float literals are kept at full precision
# exactly as produced by the optics calculation.
si_optics_default_mode = 'C02'
# Betatron tunes (horizontal/vertical) and synchrotron tune.
si_optics_tune_horizontal = 4.813860814231471E+01
si_optics_tune_vertical = 1.320733867979753E+01
si_optics_tune_synchrotron = 4.364436028401864E-03
si_optics_tune_synchrotron_dipole = 4.364436028401864E-03
# Corrected and natural chromaticities.
si_optics_chromaticity_horizontal = -4.330757974457811E-03
si_optics_natural_chromaticity_horizontal = -1.252309601795787E+02
si_optics_chromaticity_vertical = -6.926578421939666E-01
si_optics_natural_chromaticity_vertical = -8.022172846011699E+01
# RMS beam sizes at the long/short straight sections and at the BC dipole.
si_optics_beam_size_horizontal_long_straight_section = 7.009486798878925E+01 # [um]
si_optics_beam_size_horizontal_short_straight_section = 2.084762622184360E+01 # [um]
si_optics_beam_size_horizontal_dipole_bc = 9.948943931676647E+00 # [um]
si_optics_beam_size_vertical_long_straight_section = 3.214983743415169E+00 # [um]
si_optics_beam_size_vertical_short_straight_section = 1.939167731203255E+00 # [um]
si_optics_beam_size_vertical_dipole_bc = 3.996725872734588E+00 # [um]
# RMS beam divergences at the same source points.
si_optics_beam_divergence_horizontal_long_straight_section = 3.915259575507898E+00 # [urad]
si_optics_beam_divergence_horizontal_short_straight_section = 1.316409860763686E+01 # [urad]
si_optics_beam_divergence_horizontal_dipole_bc = 2.942299673538725E+01 # [urad]
si_optics_beam_divergence_vertical_long_straight_section = 8.536265817057522E-01 # [urad]
si_optics_beam_divergence_vertical_short_straight_section = 1.415244044631019E+00 # [urad]
si_optics_beam_divergence_vertical_dipole_bc = 6.866611249310436E-01 # [urad]
''' DIPOLES ONLY '''
# Synchrotron radiation integrals I1-I6 evaluated over the dipoles only.
si_optics_radiation_integral_i1_dipole = +8.799905562300937E-02 # [m]
si_optics_radiation_integral_i2_dipole = +4.331040689899748E-01 # [1/m]
si_optics_radiation_integral_i3_dipole = +3.825787715746642E-02 # [1/m^2]
si_optics_radiation_integral_i4_dipole = -1.331248659312025E-01 # [1/m]
si_optics_radiation_integral_i5_dipole = +1.176581653611004E-05 # [1/m]
si_optics_radiation_integral_i6_dipole = +1.800079309293100E-02 # [1/m]
''' IDs '''
# Insertion-device contributions to the radiation integrals — all zero
# here (no IDs included in this lattice version).
si_optics_radiation_integral_i1_id = 0.0 # [m]
si_optics_radiation_integral_i2_id = 0.0 # [1/m]
si_optics_radiation_integral_i3_id = 0.0 # [1/m^2]
si_optics_radiation_integral_i4_id = 0.0 # [1/m]
si_optics_radiation_integral_i5_id = 0.0 # [1/m]
si_optics_radiation_integral_i6_id = 0.0 # [1/m]
''' DIPOLES and IDs '''
# Transverse emittance coupling and vertical damping partition numbers.
si_optics_transverse_coupling = 1.0 # [%]
si_optics_damping_partition_number_vertical_dipole = 1.0
si_optics_damping_partition_number_vertical = 1.0
# Magnet error budget. For every error type the same tolerance applies
# to dipoles, quadrupoles and sextupoles, so each trio of names is
# bound in a single chained assignment.
si_error_alignment_dipole = \
    si_error_alignment_quadrupole = \
    si_error_alignment_sextupole = 40                # [μm]
si_error_roll_dipole = \
    si_error_roll_quadrupole = \
    si_error_roll_sextupole = 0.2                    # [mrad]
si_error_excitation_dipole = \
    si_error_excitation_quadrupole = \
    si_error_excitation_sextupole = 0.05             # [%]
si_error_ripple_dipole = \
    si_error_ripple_quadrupole = \
    si_error_ripple_sextupole = 20                   # [ppm]
si_error_vibration_dipole = \
    si_error_vibration_quadrupole = \
    si_error_vibration_sextupole = 6                 # [nm]
parameter_list = [
Parameter(name='SI lattice version', group='FAC', is_derived=False, value=si_lattice_version, symbol='', units='', deps=[], obs=[], ),
Parameter(name='SI lattice type', group='FAC', is_derived=False, value=si_lattice_type, symbol='', units='', deps=['SI lattice version'], obs=[], ),
Parameter(name='SI lattice circumference', group='GIA', is_derived=False, value=si_lattice_circumference, symbol=r'<math>C</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI lattice symmetry', group='FAC', is_derived=False, value=si_lattice_symmetry, symbol=r'<math>N_\text{SUPERCELLS}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI lattice long straight section number', group='FAC', is_derived=True, value='"SI lattice symmetry"', symbol=r'<math>N_\text{lss}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI lattice short straight section number', group='FAC', is_derived=True, value='"SI lattice symmetry"', symbol=r'<math>N_\text{sss}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI lattice long straight section length', group='FAC', is_derived=False, value=si_lattice_long_straight_section_length, symbol=r'<math>L_\text{lss}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI lattice short straight section length', group='FAC', is_derived=False, value=si_lattice_short_straight_section_length, symbol=r'<math>L_\text{sss}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI beam energy', group='GIA', is_derived=False, value=si_beam_energy, symbol=r'<math>E</math>', units='GeV', deps=[], obs=[], ),
Parameter(name='SI beam current', group='FAC', is_derived=False, value=si_beam_current, symbol=r'<math>I</math>', units='mA', deps=[], obs=[], ),
Parameter(name='SI beam gamma factor', group='FAC', is_derived=True, value='gamma("SI beam energy")', symbol=r'<math>\gamma</math>', units='', deps=[], obs=[r'<math>\gamma = \frac{E}{E_0}</math>'], ),
Parameter(name='SI beam beta factor', group='FAC', is_derived=True, value='beta("SI beam gamma factor")', symbol=r'<math>\beta</math>', units='', deps=[], obs=[r'<math>\beta \equiv \sqrt{1 - \frac{1}{\gamma^2}}</math>'], ),
Parameter(name='SI beam velocity', group='FAC', is_derived=True, value='velocity("SI beam beta factor")', symbol=r'<math>v</math>', units='m/s', deps=[], obs=[r'<math>v \equiv \beta c</math>'], ),
Parameter(name='SI beam magnetic rigidity', group='FAC', is_derived=True, value='brho("SI beam energy", "SI beam beta factor")', symbol=r'<math>(B\rho)</math>', units=unicode('T·m', encoding='utf-8'), deps=[], obs=[r'<math>(B\rho) = \frac{p}{ec} = \frac{E}{ec^2}</math>'], ),
Parameter(name='SI beam revolution period', group='FAC', is_derived=True, value='revolution_period("SI lattice circumference", "SI beam velocity")', symbol=r'<math>T_\text{rev}</math>', units=unicode('μs',encoding='utf-8'), deps=[], obs=[r'<math>T_\text{rev} \equiv \frac{C}{v}</math>'], ),
Parameter(name='SI beam revolution frequency', group='FAC', is_derived=True, value='1.0/"SI beam revolution period"', symbol=r'<math>f_\text{rev}</math>', units='MHz', deps=[], obs=[r'<math>f_\text{rev} \equiv \frac{1}{T_\text{rev}}</math>'], ),
Parameter(name='SI beam electron number', group='FAC', is_derived=True, value='number_of_electrons("SI beam current", "SI beam revolution period")', symbol=r'<math>N</math>', units='', deps=[], obs=[], ),
Parameter(name='SI rf harmonic number', group='FAC', is_derived=False, value=si_rf_harmonic_number, symbol=r'<math>h</math>', units='', deps=[], obs=[], ),
Parameter(name='SI rf frequency', group='FAC', is_derived=True, value='rf_frequency("SI beam revolution frequency", "SI rf harmonic number")', symbol=r'<math>f_\text{rf}</math>', units='MHz', deps=[], obs=[], ),
Parameter(name='SI rf peak voltage', group='FAC', is_derived=False, value=si_rf_peak_voltage, symbol=r'<math>V_\text{rf}</math>', units='MV', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b1 number', group='FAC', is_derived=True, value='4*"SI lattice symmetry"', symbol=r'<math>N_\text{b1}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b1 deflection angle', group='FAC', is_derived=False, value=si_magnet_dipole_b1_deflection_angle, symbol=r'<math>\theta_\text{b1}</math>', units=unicode('°',encoding='utf-8'), deps=[], obs=[], ),
Parameter(name='SI magnet dipole b1 hardedge length', group='FAC', is_derived=False, value=si_magnet_dipole_b1_hardedge_length, symbol=r'<math>L_\text{b1}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b1 hardedge bending radius', group='FAC', is_derived=True, value='"SI magnet dipole b1 hardedge length" / deg2rad("SI magnet dipole b1 deflection angle")', symbol=r'<math>\rho_\text{b1}</math>', units='m', deps=[], obs=[r'<math>\rho_\text{b1} = \frac{L_\text{b1}}{\theta_\text{b1}}</math>'], ),
Parameter(name='SI magnet dipole b1 hardedge magnetic field', group='FAC', is_derived=True, value='"SI beam magnetic rigidity" / "SI magnet dipole b1 hardedge bending radius"', symbol=r'<math>B_\text{b1}</math>', units='T', deps=[], obs=[r'<math>B_\text{b1} = \frac{(B\rho)}{\rho_\text{b1}}</math>'], ),
Parameter(name='SI magnet dipole b1 hardedge critical energy', group='FAC', is_derived=True, value='critical_energy("SI beam gamma factor", "SI magnet dipole b1 hardedge bending radius")', symbol=r'<math>\epsilon_\text{c,b1}</math>', units='keV', deps=[], obs=[r'<math>\epsilon_\text{c,b1} = \frac{3}{2} \hbar c \frac{\gamma^3}{\rho_\text{b1}}</math>'], ),
Parameter(name='SI magnet dipole b1 hardedge sagitta', group='FAC', is_derived=True, value='1000*"SI magnet dipole b1 hardedge bending radius"*(1.0-cos(0.5*deg2rad("SI magnet dipole b1 deflection angle")))', symbol=r'<math>S_\text{sag, b1}</math>', units='mm', deps=[], obs=[r'<math>S_\text{sag, b1} = \rho_\text{b1} (1 - \cos \theta_\text{b1} / 2)</math>'],),
Parameter(name='SI magnet dipole b2 number', group='FAC', is_derived=True, value='4*"SI lattice symmetry"', symbol=r'<math>N_\text{b2}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b2 deflection angle', group='FAC', is_derived=False, value=si_magnet_dipole_b2_deflection_angle, symbol=r'<math>\theta_\text{b2}</math>', units=unicode('°',encoding='utf-8'), deps=[], obs=[], ),
Parameter(name='SI magnet dipole b2 hardedge length', group='FAC', is_derived=False, value=si_magnet_dipole_b2_hardedge_length, symbol=r'<math>L_\text{b2}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b2 hardedge bending radius', group='FAC', is_derived=True, value='"SI magnet dipole b2 hardedge length" / deg2rad("SI magnet dipole b2 deflection angle")', symbol=r'<math>\rho_\text{b2}</math>', units='m', deps=[], obs=[r'<math>\rho_\text{b2} = \frac{L_\text{b2}}{\theta_\text{b2}}</math>'], ),
Parameter(name='SI magnet dipole b2 hardedge magnetic field', group='FAC', is_derived=True, value='"SI beam magnetic rigidity" / "SI magnet dipole b2 hardedge bending radius"', symbol=r'<math>B_\text{b2}</math>', units='T', deps=[], obs=[r'<math>B_\text{b2} = \frac{(B\rho)}{\rho_\text{b2}}</math>'], ),
Parameter(name='SI magnet dipole b2 hardedge critical energy', group='FAC', is_derived=True, value='critical_energy("SI beam gamma factor", "SI magnet dipole b2 hardedge bending radius")', symbol=r'<math>\epsilon_\text{c,b2}</math>', units='keV', deps=[], obs=[r'<math>\epsilon_\text{c,b2} = \frac{3}{2} \hbar c \frac{\gamma^3}{\rho_\text{b2}}</math>'], ),
Parameter(name='SI magnet dipole b2 hardedge sagitta', group='FAC', is_derived=True, value='1000*"SI magnet dipole b2 hardedge bending radius" * (1.0-cos(0.5*deg2rad("SI magnet dipole b2 deflection angle")))', symbol=r'<math>S_\text{sag, b2}</math>', units='mm', deps=[], obs=[r'<math>S_\text{sag, b2} = \rho_\text{b2} (1 - \cos \theta_\text{b2} / 2)</math>'],),
Parameter(name='SI magnet dipole b3 number', group='FAC', is_derived=True, value='4*"SI lattice symmetry"', symbol=r'<math>N_\text{b3}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b3 deflection angle', group='FAC', is_derived=False, value=si_magnet_dipole_b3_deflection_angle, symbol=r'<math>\theta_\text{b3}</math>', units=unicode('°',encoding='utf-8'), deps=[], obs=[], ),
Parameter(name='SI magnet dipole b3 hardedge length', group='FAC', is_derived=False, value=si_magnet_dipole_b3_hardedge_length, symbol=r'<math>L_\text{b3}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet dipole b3 hardedge bending radius', group='FAC', is_derived=True, value='"SI magnet dipole b3 hardedge length" / deg2rad("SI magnet dipole b3 deflection angle")', symbol=r'<math>\rho_\text{b3}</math>', units='m', deps=[], obs=[r'<math>\rho_\text{b3} = \frac{L_\text{b3}}{\theta_\text{b3}}</math>'], ),
Parameter(name='SI magnet dipole b3 hardedge magnetic field', group='FAC', is_derived=True, value='"SI beam magnetic rigidity" / "SI magnet dipole b3 hardedge bending radius"', symbol=r'<math>B_\text{b3}</math>', units='T', deps=[], obs=[r'<math>B_\text{b3} = \frac{(B\rho)}{\rho_\text{b3}}</math>'], ),
Parameter(name='SI magnet dipole b3 hardedge critical energy', group='FAC', is_derived=True, value='critical_energy("SI beam gamma factor", "SI magnet dipole b3 hardedge bending radius")', symbol=r'<math>\epsilon_\text{c,b3}</math>', units='keV', deps=[], obs=[r'<math>\epsilon_\text{c,b3} = \frac{3}{2} \hbar c \frac{\gamma^3}{\rho_\text{b3}}</math>'], ),
Parameter(name='SI magnet dipole b3 hardedge sagitta', group='FAC', is_derived=True, value='1000*"SI magnet dipole b3 hardedge bending radius" * (1.0-cos(0.5*deg2rad("SI magnet dipole b3 deflection angle")))', symbol=r'<math>S_\text{sag, b3}</math>', units='mm', deps=[], obs=[r'<math>S_\text{sag, b3} = \rho_\text{b3} (1 - \cos \theta_\text{b3} / 2)</math>'],),
Parameter(name='SI magnet dipole bc number', group='FAC', is_derived=True, value='2*"SI lattice symmetry"', symbol=r'<math>N_\text{bc}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet dipole bc deflection angle', group='FAC', is_derived=False, value=si_magnet_dipole_bc_deflection_angle, symbol=r'<math>\theta_\text{bc}</math>', units=unicode('°',encoding='utf-8'), deps=[], obs=[], ),
Parameter(name='SI magnet dipole bc hardedge length', group='FAC', is_derived=False, value=si_magnet_dipole_bc_hardedge_length, symbol=r'<math>L_\text{bc}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet dipole bc hardedge bending radius', group='FAC', is_derived=True, value='"SI magnet dipole bc hardedge length" / deg2rad("SI magnet dipole bc deflection angle")', symbol=r'<math>\rho_\text{bc}</math>', units='m', deps=[], ),
Parameter(name='SI magnet dipole bc hardedge magnetic field', group='FAC', is_derived=True, value='"SI beam magnetic rigidity" / "SI magnet dipole bc hardedge bending radius"', symbol=r'<math>B_\text{bc}</math>', units='T', deps=[], ),
Parameter(name='SI magnet dipole bc hardedge critical energy', group='FAC', is_derived=True, value='critical_energy("SI beam gamma factor", "SI magnet dipole bc hardedge bending radius")', symbol=r'<math>\epsilon_\text{c,bc}</math>', units='keV', deps=[], ),
Parameter(name='SI magnet dipole bc hardedge sagitta', group='FAC', is_derived=True, value='1000 * "SI magnet dipole bc hardedge bending radius" * (1.0 - cos(0.5*deg2rad("SI magnet dipole bc deflection angle")))', symbol=r'<math>S_\text{sag, bc}</math>', units='mm', deps=[],),
Parameter(name='SI magnet sextupole sda number', group='FAC', is_derived=False, value=si_magnet_sextupole_sda_number, symbol=r'<math>N_\text{sda}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sda hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sda_hardedge_length, symbol=r'<math>L_\text{sda}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sda integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sda_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sda}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sda} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sda hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sda integrated sextupolar field maximum" / "SI magnet sextupole sda hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sda}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sda hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sda hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sda}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sfa number', group='FAC', is_derived=False, value=si_magnet_sextupole_sfa_number, symbol=r'<math>N_\text{sfa}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sfa hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sfa_hardedge_length, symbol=r'<math>L_\text{sfa}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sfa integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sfa_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sfa}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sfa} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sfa hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sfa integrated sextupolar field maximum" / "SI magnet sextupole sfa hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sfa}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sfa hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sfa hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sfa}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sdb number', group='FAC', is_derived=False, value=si_magnet_sextupole_sdb_number, symbol=r'<math>N_\text{sdb}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sdb hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sdb_hardedge_length, symbol=r'<math>L_\text{sdb}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sdb integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sdb_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sdb}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sdb} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sdb hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sdb integrated sextupolar field maximum" / "SI magnet sextupole sdb hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sdb}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sdb hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sdb hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sdb}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sfb number', group='FAC', is_derived=False, value=si_magnet_sextupole_sfb_number, symbol=r'<math>N_\text{sfb}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sfb hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sfb_hardedge_length, symbol=r'<math>L_\text{sfb}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sfb integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sfb_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sfb}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sfb} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sfb hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sfb integrated sextupolar field maximum" / "SI magnet sextupole sfb hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sfb}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sfb hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sfb hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sfb}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd1 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd1_number, symbol=r'<math>N_\text{sd1}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd1 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd1_hardedge_length, symbol=r'<math>L_\text{sd1}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd1 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd1_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd1}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd1} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd1 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd1 integrated sextupolar field maximum" / "SI magnet sextupole sd1 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd1}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd1 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd1 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd1}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd2 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd2_number, symbol=r'<math>N_\text{sd2}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd2 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd2_hardedge_length, symbol=r'<math>L_\text{sd2}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd2 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd2_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd2}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd2} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd2 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd2 integrated sextupolar field maximum" / "SI magnet sextupole sd2 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd2}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd2 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd2 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd2}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd3 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd3_number, symbol=r'<math>N_\text{sd3}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd3 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd3_hardedge_length, symbol=r'<math>L_\text{sd3}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd3 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd3_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd3}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd3} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd3 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd3 integrated sextupolar field maximum" / "SI magnet sextupole sd3 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd3}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd3 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd3 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd3}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd4 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd4_number, symbol=r'<math>N_\text{sd4}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd4 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd4_hardedge_length, symbol=r'<math>L_\text{sd4}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd4 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd4_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd4}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd4} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd4 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd4 integrated sextupolar field maximum" / "SI magnet sextupole sd4 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd4}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd4 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd4 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd4}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd5 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd5_number, symbol=r'<math>N_\text{sd5}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd5 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd5_hardedge_length, symbol=r'<math>L_\text{sd5}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd5 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd5_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd5}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd5} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd5 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd5 integrated sextupolar field maximum" / "SI magnet sextupole sd5 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd5}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd5 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd5 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd5}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd6 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sd6_number, symbol=r'<math>N_\text{sd6}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd6 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sd6_hardedge_length, symbol=r'<math>L_\text{sd6}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sd6 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sd6_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sd6}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sd6} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sd6 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd6 integrated sextupolar field maximum" / "SI magnet sextupole sd6 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sd6}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sd6 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sd6 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sd6}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf1 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sf1_number, symbol=r'<math>N_\text{sf1}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf1 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sf1_hardedge_length, symbol=r'<math>L_\text{sf1}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf1 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sf1_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sf1}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sf1} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sf1 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf1 integrated sextupolar field maximum" / "SI magnet sextupole sf1 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sf1}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf1 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf1 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sf1}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf2 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sf2_number, symbol=r'<math>N_\text{sf2}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf2 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sf2_hardedge_length, symbol=r'<math>L_\text{sf2}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf2 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sf2_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sf2}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sf2} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sf2 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf2 integrated sextupolar field maximum" / "SI magnet sextupole sf2 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sf2}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf2 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf2 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sf2}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf3 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sf3_number, symbol=r'<math>N_\text{sf3}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf3 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sf3_hardedge_length, symbol=r'<math>L_\text{sf3}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf3 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sf3_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sf3}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sf3} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sf3 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf3 integrated sextupolar field maximum" / "SI magnet sextupole sf3 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sf3}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf3 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf3 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sf3}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf4 number', group='FAC', is_derived=False, value=si_magnet_sextupole_sf4_number, symbol=r'<math>N_\text{sf4}</math>', units='', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf4 hardedge length', group='FAC', is_derived=False, value=si_magnet_sextupole_sf4_hardedge_length, symbol=r'<math>L_\text{sf4}</math>', units='m', deps=[], obs=[], ),
Parameter(name='SI magnet sextupole sf4 integrated sextupolar field maximum', group='FAC', is_derived=False, value=si_magnet_sextupole_sf4_integrated_sextupolar_field_maximum, symbol=r"<math>(1/2)(B''L)_\text{max, sf4}</math>", units='T.m<sup>-1</sup>', deps=[], obs=[r"<math>(1/2)(B''L)_\text{max, sf4} \equiv \frac{1}{2} \int{ds\;\frac{\partial^2 B_y}{\partial x^2}}</math>"], ),
Parameter(name='SI magnet sextupole sf4 hardedge sextupolar field maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf4 integrated sextupolar field maximum" / "SI magnet sextupole sf4 hardedge length"', symbol=r"<math>(1/2)B''_\text{max, sf4}</math>", units='T.m<sup>-2</sup>', deps=[], ),
Parameter(name='SI magnet sextupole sf4 hardedge strength maximum', group='FAC', is_derived=True, value='"SI magnet sextupole sf4 hardedge sextupolar field maximum" / "SI beam magnetic rigidity"', symbol=r"<math>S_\text{max, sf4}</math>", units='m<sup>-3</sup>', deps=[], ),
Parameter(name='SI bpm number', group='FAC', is_derived=False, value=si_bpm_number, symbol=r'<math>N_\text{bpm}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet chs number', group='FAC', is_derived=False, value=si_magnet_chs_number, symbol=r'<math>N_\text{chs}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet cvs number', group='FAC', is_derived=False, value=si_magnet_cvs_number, symbol=r'<math>N_\text{cvs}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet chf number', group='FAC', is_derived=False, value=si_magnet_chf_number, symbol=r'<math>N_\text{chf}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet cvf number', group='FAC', is_derived=False, value=si_magnet_cvf_number, symbol=r'<math>N_\text{cvf}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet qs number', group='FAC', is_derived=False, value=si_magnet_qs_number, symbol=r'<math>N_\text{qs}</math>', units='', deps=[], obs=[]),
Parameter(name='SI magnet chs maximum strength', group='FAC', is_derived=False, value=si_magnet_chs_maximum_strength, symbol=r'<math>\theta_\text{max,chs}</math>', units=unicode('μrad', encoding='utf-8'), deps=[], obs=[]),
Parameter(name='SI magnet cvs maximum strength', group='FAC', is_derived=False, value=si_magnet_cvs_maximum_strength, symbol=r'<math>\theta_\text{max,cvs}</math>', units=unicode('μrad', encoding='utf-8'), deps=[], obs=[]),
Parameter(name='SI magnet chf maximum strength', group='FAC', is_derived=False, value=si_magnet_chf_maximum_strength, symbol=r'<math>\theta_\text{max,chf}</math>', units=unicode('μrad', encoding='utf-8'), deps=[], obs=[]),
Parameter(name='SI magnet cvf maximum strength', group='FAC', is_derived=False, value=si_magnet_cvf_maximum_strength, symbol=r'<math>\theta_\text{max,cvf}</math>', units=unicode('μrad', encoding='utf-8'), deps=[], obs=[]),
Parameter(name='SI magnet qs maximum strength', group='FAC', is_derived=False, value=si_magnet_qs_maximum_strength, symbol=r'<math>\theta_\text{max,qs}</math>', units='m<sup>-1</sup>', deps=[], obs=[]),
Parameter(name='SI optics default mode', group='FAC', is_derived=False, value=si_optics_default_mode, symbol='', units='', deps=['SI lattice version'], obs=[], ),
Parameter(name='SI optics tune horizontal', group='FAC', is_derived=False, value=si_optics_tune_horizontal, symbol=r'<math>\nu_x</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics tune vertical', group='FAC', is_derived=False, value=si_optics_tune_vertical, symbol=r'<math>\nu_y</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics tune synchrotron dipole', group='FAC', is_derived=False, value=si_optics_tune_synchrotron_dipole, symbol=r'<math>\nu_{s,\text{dip}}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics tune synchrotron', group='FAC', is_derived=False, value=si_optics_tune_synchrotron, symbol=r'<math>\nu_{s}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics betatron frequency horizontal', group='FAC', is_derived=True, value='frequency_from_tune("SI beam revolution frequency", "SI optics tune horizontal")', symbol=r'<math>f_x</math>', units='kHz', deps=[], ),
Parameter(name='SI optics betatron frequency vertical', group='FAC', is_derived=True, value='frequency_from_tune("SI beam revolution frequency", "SI optics tune vertical")', symbol=r'<math>f_y</math>', units='kHz', deps=[], ),
Parameter(name='SI optics synchrotron frequency', group='FAC', is_derived=True, value='frequency_from_tune("SI beam revolution frequency", "SI optics tune synchrotron")', symbol=r'<math>f_{s}</math>', units='kHz', deps=[], ),
Parameter(name='SI optics natural linear chromaticity horizontal', group='FAC', is_derived=False, value=si_optics_natural_chromaticity_horizontal, symbol=r'<math>\xi_{0, x}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics linear chromaticity horizontal', group='FAC', is_derived=False, value=si_optics_chromaticity_horizontal, symbol=r'<math>\xi_{x}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics natural linear chromaticity vertical', group='FAC', is_derived=False, value=si_optics_natural_chromaticity_vertical, symbol=r'<math>\xi_{0, y}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics linear chromaticity vertical', group='FAC', is_derived=False, value=si_optics_chromaticity_vertical, symbol=r'<math>\xi_{y}</math>', units='', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics beam size horizontal long straight section', group='FAC', is_derived=False, value=si_optics_beam_size_horizontal_long_straight_section, symbol=r'<math>\sigma_\text{x, lss}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{x, lss} = \sqrt{\epsilon_x \beta_{x, \text{lss}} + \left(\sigma_\delta \eta_{x, \text{lss}}\right)^2}</math>'], ),
Parameter(name='SI optics beam size horizontal short straight section', group='FAC', is_derived=False, value=si_optics_beam_size_horizontal_short_straight_section, symbol=r'<math>\sigma_\text{x, sss}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{x, sss} = \sqrt{\epsilon_x \beta_{x, \text{sss}} + \left(\sigma_\delta \eta_{x, \text{sss}}\right)^2}</math>'], ),
Parameter(name='SI optics beam size horizontal dipole bc', group='FAC', is_derived=False, value=si_optics_beam_size_horizontal_dipole_bc, symbol=r'<math>\sigma_\text{x, bc}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{x, bc} = \sqrt{\epsilon_x \beta_{x, \text{bc}} + \left(\sigma_\delta \eta_{x, \text{bc}}\right)^2}</math>'], ),
Parameter(name='SI optics beam size vertical long straight section', group='FAC', is_derived=False, value=si_optics_beam_size_vertical_long_straight_section, symbol=r'<math>\sigma_\text{y, lss}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{y, lss} = \sqrt{\epsilon_y \beta_{y, \text{lss}} + \left(\sigma_\delta \eta_{y, \text{lss}}\right)^2}</math>'], ),
Parameter(name='SI optics beam size vertical short straight section', group='FAC', is_derived=False, value=si_optics_beam_size_vertical_short_straight_section, symbol=r'<math>\sigma_\text{y, sss}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{y, sss} = \sqrt{\epsilon_y \beta_{y, \text{sss}} + \left(\sigma_\delta \eta_{y, \text{sss}}\right)^2}</math>'], ),
Parameter(name='SI optics beam size vertical dipole bc', group='FAC', is_derived=False, value=si_optics_beam_size_vertical_dipole_bc, symbol=r'<math>\sigma_\text{y, bc}</math>', units=unicode('μm',encoding='utf-8'), deps=['SI optics default mode'], obs=[r'<math>\sigma_\text{y, bc} = \sqrt{\epsilon_y \beta_{y, \text{bc}} + \left(\sigma_\delta \eta_{y, \text{bc}}\right)^2}</math>'], ),
Parameter(name='SI optics beam divergence horizontal long straight section', group='FAC', is_derived=False, value=si_optics_beam_divergence_horizontal_long_straight_section, symbol=r"<math>\sigma_\text{x', lss}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{x', lss} = \sqrt{\epsilon_x \gamma_{x, \text{lss}} + \left(\sigma_\delta \eta'_{x, \text{lss}}\right)^2}</math>"], ),
Parameter(name='SI optics beam divergence horizontal short straight section', group='FAC', is_derived=False, value=si_optics_beam_divergence_horizontal_short_straight_section, symbol=r"<math>\sigma_\text{x', sss}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{x', sss} = \sqrt{\epsilon_x \gamma_{x, \text{sss}} + \left(\sigma_\delta \eta'_{x, \text{sss}}\right)^2}</math>"], ),
Parameter(name='SI optics beam divergence horizontal dipole bc', group='FAC', is_derived=False, value=si_optics_beam_divergence_horizontal_dipole_bc, symbol=r"<math>\sigma_\text{x', bc}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{x', bc} = \sqrt{\epsilon_x \gamma_{x, \text{bc}} + \left(\sigma_\delta \eta'_{x, \text{bc}}\right)^2}</math>"], ),
Parameter(name='SI optics beam divergence vertical long straight section', group='FAC', is_derived=False, value=si_optics_beam_divergence_vertical_long_straight_section, symbol=r"<math>\sigma_\text{y', lss}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{y', lss} = \sqrt{\epsilon_y \gamma_{y, \text{lss}} + \left(\sigma_\delta \eta'_{y, \text{lss}}\right)^2}</math>"], ),
Parameter(name='SI optics beam divergence vertical short straight section', group='FAC', is_derived=False, value=si_optics_beam_divergence_vertical_short_straight_section, symbol=r"<math>\sigma_\text{y', sss}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{y', sss} = \sqrt{\epsilon_y \gamma_{y, \text{sss}} + \left(\sigma_\delta \eta'_{y, \text{sss}}\right)^2}</math>"], ),
Parameter(name='SI optics beam divergence vertical dipole bc', group='FAC', is_derived=False, value=si_optics_beam_divergence_vertical_dipole_bc, symbol=r"<math>\sigma_\text{y', bc}</math>", units=unicode('μrad',encoding='utf-8'), deps=['SI optics default mode'], obs=[r"<math>\sigma_\text{y', bc} = \sqrt{\epsilon_y \gamma_{y, \text{bc}} + \left(\sigma_\delta \eta'_{y, \text{bc}}\right)^2}</math>"], ),
Parameter(name='SI optics radiation integral i1 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i1_dipole, symbol=r'<math>I_\text{1,dip}</math>', units='m', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{1,dip} = \oint{\frac{\eta_x}{\rho_x}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i2 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i2_dipole, symbol=r'<math>I_\text{2,dip}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{2,dip} = \oint{\frac{1}{\rho_x^2}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i3 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i3_dipole, symbol=r'<math>I_\text{3,dip}</math>', units='m<sup>-2</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{3,dip} = \oint{\frac{1}{|\rho_x|^3}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i4 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i4_dipole, symbol=r'<math>I_\text{4,dip}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{4,dip} = \frac{\eta_x(s_0) \tan \theta(s_0)}{\rho_x^2} + \oint{\frac{\eta_x}{\rho_x^3} \left(1 + 2 \rho_x^2 k\right)\,ds} + \frac{\eta_x(s_1) \tan \theta(s_1)}{\rho_x^2}</math>'], ),
Parameter(name='SI optics radiation integral i5 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i5_dipole, symbol=r'<math>I_\text{5,dip}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{5,dip} = \oint{\frac{H_x}{|\rho_x|^3}\,ds}</math>', r"<math>H_x \equiv \gamma_x \eta_x^2 + 2 \alpha_x \eta_x \eta_x^' + \beta_x {\eta_x^'}^2</math>"], ),
Parameter(name='SI optics radiation integral i6 dipole', group='FAC', is_derived=False, value=si_optics_radiation_integral_i6_dipole, symbol=r'<math>I_\text{6,dip}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{6,dip} = \oint{k^2 \eta_x^2\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i1 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i1_id, symbol=r'<math>I_\text{1,id}</math>', units='m', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{1,id} = \oint{\frac{\eta_x}{\rho_x}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i2 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i2_id, symbol=r'<math>I_\text{2,id}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{2,id} = \oint{\frac{1}{\rho_x^2}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i3 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i3_id, symbol=r'<math>I_\text{3,id}</math>', units='m<sup>-2</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{3,id} = \oint{\frac{1}{|\rho_x|^3}\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i4 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i4_id, symbol=r'<math>I_\text{4,id}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{4,id} = \frac{\eta_x(s_0) \tan \theta(s_0)}{\rho_x^2} + \oint{\frac{\eta_x}{\rho_x^3} \left(1 + 2 \rho_x^2 k\right)\,ds} + \frac{\eta_x(s_1) \tan \theta(s_1)}{\rho_x^2}</math>'], ),
Parameter(name='SI optics radiation integral i5 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i5_id, symbol=r'<math>I_\text{5,id}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{5,id} = \oint{\frac{H_x}{|\rho_x|^3}\,ds}</math>', r"<math>H_x \equiv \gamma_x \eta_x^2 + 2 \alpha_x \eta_x \eta_x^' + \beta_x {\eta_x^'}^2</math>"], ),
Parameter(name='SI optics radiation integral i6 id', group='FAC', is_derived=False, value=si_optics_radiation_integral_i6_id, symbol=r'<math>I_\text{6,id}</math>', units='m<sup>-1</sup>', deps=['SI beam magnetic rigidity', 'SI optics default mode'], obs=[r'<math>I_\text{6,id} = \oint{k^2 \eta_x^2\,ds}</math>'], ),
Parameter(name='SI optics radiation integral i1', group='FAC', is_derived=True, value='("SI optics radiation integral i1 dipole" + "SI optics radiation integral i1 id")', symbol=r'<math>I_\text{1}</math>', units='m', deps=[], ),
Parameter(name='SI optics radiation integral i2', group='FAC', is_derived=True, value='("SI optics radiation integral i2 dipole" + "SI optics radiation integral i2 id")', symbol=r'<math>I_\text{2}</math>', units='m<sup>-1</sup>', deps=[], ),
Parameter(name='SI optics radiation integral i3', group='FAC', is_derived=True, value='("SI optics radiation integral i3 dipole" + "SI optics radiation integral i3 id")', symbol=r'<math>I_\text{3}</math>', units='m<sup>-2</sup>', deps=[], ),
Parameter(name='SI optics radiation integral i4', group='FAC', is_derived=True, value='("SI optics radiation integral i4 dipole" + "SI optics radiation integral i4 id")', symbol=r'<math>I_\text{4}</math>', units='m<sup>-1</sup>', deps=[], ),
Parameter(name='SI optics radiation integral i5', group='FAC', is_derived=True, value='("SI optics radiation integral i5 dipole" + "SI optics radiation integral i5 id")', symbol=r'<math>I_\text{5}</math>', units='m<sup>-1</sup>', deps=[], ),
Parameter(name='SI optics radiation integral i6', group='FAC', is_derived=True, value='("SI optics radiation integral i6 dipole" + "SI optics radiation integral i6 id")', symbol=r'<math>I_\text{6}</math>', units='m<sup>-1</sup>', deps=[], ),
Parameter(name='SI optics transverse coupling', group='FAC', is_derived=False, value=si_optics_transverse_coupling, symbol=r'<math>\kappa</math>', units='%', deps=['SI optics default mode'], obs=[], ),
Parameter(name='SI optics damping partition number vertical dipole', group='FAC', is_derived=False, value=si_optics_damping_partition_number_vertical_dipole, symbol=r'<math>J_{\text{y, dip}}</math>', units='', deps=[], obs=['Vertical damping partition number is identically one for error-free machines for which vertical dispersion functions are zero everywhere.', r'<math>J_{\text{y, dip}} = 1 - \frac{I_\text{4y,dip}}{I_\text{2,dip}} \equiv 1</math>'], ),
Parameter(name='SI optics damping partition number horizontal dipole', group='FAC', is_derived=True, value='Jx("SI optics radiation integral i2 dipole", "SI optics radiation integral i4 dipole")', symbol=r'<math>J_{\text{x, dip}}</math>', units='', deps=[], ),
Parameter(name='SI optics damping partition number longitudinal dipole', group='FAC', is_derived=True, value='Js("SI optics damping partition number horizontal dipole", "SI optics damping partition number vertical dipole")', symbol=r'<math>J_{\text{s, dip}}</math>', units='', deps=[], ),
Parameter(name='SI optics energy loss per turn dipole', group='FAC', is_derived=True, value='U0("SI beam energy", "SI optics radiation integral i2 dipole")', symbol=r'<math>U_\text{0,dip}</math>', units='keV', deps=[], ),
Parameter(name='SI optics radiation power dipole', group='FAC', is_derived=True, value='radiation_power("SI optics energy loss per turn dipole", "SI beam current")', symbol=r'<math>P_{\text{dip}}</math>', units='kW', deps=[], ),
Parameter(name='SI optics overvoltage dipole', group='FAC', is_derived=True, value='overvoltage("SI rf peak voltage", "SI optics energy loss per turn dipole")', symbol=r'<math>q_\text{dip}</math>', units='', deps=[], ),
Parameter(name='SI optics synchronous phase dipole', group='FAC', is_derived=True, value='sync_phase("SI optics overvoltage dipole")', symbol=r'<math>\phi_0</math>', units=unicode('°',encoding='utf-8'), deps=[], ),
Parameter(name='SI optics linear momentum compaction dipole', group='FAC', is_derived=True, value='alpha1("SI optics radiation integral i1 dipole", "SI lattice circumference")', symbol=r'<math>\alpha_\text{1,dip}</math>', units='', deps=[], ),
Parameter(name='SI optics linear slip phase dipole', group='FAC', is_derived=True, value='slip_factor("SI optics linear momentum compaction dipole", "SI beam gamma factor")', symbol=r'<math>\eta_{1,\text{dip}}</math>', units='', deps=[], ),
Parameter(name='SI optics rf energy acceptance dipole', group='FAC', is_derived=True, value='rf_energy_acceptance("SI optics overvoltage dipole", "SI beam energy", "SI optics energy loss per turn dipole", "SI rf harmonic number", "SI optics linear momentum compaction dipole")', symbol=r'<math>\epsilon_{\text{max},\text{dip}}</math>', units='%', deps=[], ),
Parameter(name='SI optics natural emittance dipole', group='FAC', is_derived=True, value='natural_emittance("SI beam gamma factor", "SI optics damping partition number horizontal dipole", "SI optics radiation integral i2 dipole", "SI optics radiation integral i5 dipole")', symbol=r'<math>\epsilon_{0,\text{dip}}</math>', units=unicode('nm⋅rad',encoding='utf-8'), deps=[], ),
Parameter(name='SI optics natural energy spread dipole', group='FAC', is_derived=True, value='energy_spread("SI beam gamma factor", "SI optics radiation integral i2 dipole", "SI optics radiation integral i3 dipole", "SI optics radiation integral i4 dipole")', symbol=r'<math>\sigma_{\delta,\text{dip}}</math>', units='%', deps=[], ),
Parameter(name='SI optics natural bunch length dipole', group='FAC', is_derived=True, value='bunch_length("SI optics linear slip phase dipole", "SI optics natural energy spread dipole", "SI optics synchrotron frequency")', symbol=r'<math>\sigma_{\text{s, dip}}</math>', units='mm', deps=[], ),
Parameter(name='SI optics natural bunch duration dipole', group='FAC', is_derived=True, value='bunch_duration("SI optics natural bunch length dipole", "SI beam beta factor")', symbol=r'<math>\sigma_{\text{t, dip}}</math>', units='ps', deps=[], ),
Parameter(name='SI optics radiation damping time horizontal dipole', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2 dipole", "SI optics damping partition number horizontal dipole", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{x, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI optics radiation damping time vertical dipole', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2 dipole", "SI optics damping partition number vertical dipole", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{y, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI optics radiation damping time longitudinal dipole', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2 dipole", "SI optics damping partition number longitudinal dipole", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{s, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI optics energy loss per turn id', group='FAC',is_derived=True, value='U0("SI beam energy", "SI optics radiation integral i2 id")', symbol=r'<math>U_\text{0,id}</math>', units='keV', deps=[], ),
Parameter(name='SI optics damping partition number vertical', group='FAC', is_derived=False, value=si_optics_damping_partition_number_vertical, symbol=r'<math>J_{\text{y}}</math>', units='', deps=[], obs=['Vertical damping partition number is identically one for error-free machines for which vertical dispersion functions are zero everywhere.', r'<math>J_{\text{y}} = 1 - \frac{I_\text{4y,dip}}{I_\text{2}} \equiv 1</math>'], ),
Parameter(name='SI optics damping partition number horizontal', group='FAC', is_derived=True, value='Jx("SI optics radiation integral i2", "SI optics radiation integral i4")', symbol=r'<math>J_{\text{x}}</math>', units='', deps=[], ),
Parameter(name='SI optics damping partition number longitudinal', group='FAC', is_derived=True, value='Js("SI optics damping partition number horizontal", "SI optics damping partition number vertical")', symbol=r'<math>J_{\text{s}}</math>', units='', deps=[], ),
Parameter(name='SI optics energy loss per turn', group='FAC', is_derived=True, value='U0("SI beam energy", "SI optics radiation integral i2")', symbol=r'<math>U_\text{0}</math>', units='keV', deps=[], ),
Parameter(name='SI optics radiation power', group='FAC', is_derived=True, value='radiation_power("SI optics energy loss per turn", "SI beam current")', symbol=r'<math>P</math>', units='kW', deps=[], ),
Parameter(name='SI optics overvoltage', group='FAC', is_derived=True, value='overvoltage("SI rf peak voltage", "SI optics energy loss per turn")', symbol=r'<math>q</math>', units='', deps=[], ),
Parameter(name='SI optics synchronous phase', group='FAC', is_derived=True, value='sync_phase("SI optics overvoltage")', symbol=r'<math>\phi_0</math>', units=unicode('°',encoding='utf-8'), deps=[], ),
Parameter(name='SI optics linear momentum compaction', group='FAC', is_derived=True, value='alpha1("SI optics radiation integral i1", "SI lattice circumference")', symbol=r'<math>\alpha_\text{1}</math>', units='', deps=[], ),
Parameter(name='SI optics linear slip phase', group='FAC', is_derived=True, value='slip_factor("SI optics linear momentum compaction", "SI beam gamma factor")', symbol=r'<math>\eta_{1}</math>', units='', deps=[], ),
Parameter(name='SI optics rf energy acceptance', group='FAC', is_derived=True, value='rf_energy_acceptance("SI optics overvoltage", "SI beam energy", "SI optics energy loss per turn", "SI rf harmonic number", "SI optics linear momentum compaction")', symbol=r'<math>\epsilon_{\text{max}}</math>', units='%', deps=[], ),
Parameter(name='SI optics natural emittance', group='FAC', is_derived=True, value='natural_emittance("SI beam gamma factor", "SI optics damping partition number horizontal", "SI optics radiation integral i2", "SI optics radiation integral i5")', symbol=r'<math>\epsilon_{0}</math>', units=unicode('nm⋅rad',encoding='utf-8'), deps=[], ),
Parameter(name='SI optics natural energy spread', group='FAC', is_derived=True, value='energy_spread("SI beam gamma factor", "SI optics radiation integral i2", "SI optics radiation integral i3", "SI optics radiation integral i4")', symbol=r'<math>\sigma_{\delta,\text{dip}}</math>', units='%', deps=[], ),
Parameter(name='SI optics natural bunch length', group='FAC', is_derived=True, value='bunch_length("SI optics linear slip phase", "SI optics natural energy spread", "SI optics synchrotron frequency")', symbol=r'<math>\sigma_{\text{s, dip}}</math>', units='mm', deps=[], ),
Parameter(name='SI optics natural bunch duration', group='FAC', is_derived=True, value='bunch_duration("SI optics natural bunch length", "SI beam beta factor")', symbol=r'<math>\sigma_{\text{t, dip}}</math>', units='ps', deps=[], ),
Parameter(name='SI optics radiation damping time horizontal', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2", "SI optics damping partition number horizontal", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{x, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI optics radiation damping time vertical', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2", "SI optics damping partition number vertical", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{y, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI optics radiation damping time longitudinal', group='FAC', is_derived=True, value='damping_time("SI beam energy", "SI optics radiation integral i2", "SI optics damping partition number longitudinal", "SI lattice circumference")', symbol=r'<math>\alpha_{\text{s, dip}}</math>', units='ms', deps=[], ),
Parameter(name='SI error alignment dipole', group='FAC', is_derived=False, value=si_error_alignment_dipole, symbol=r'<math>E_{xy,\text{dip}}</math>', units=unicode('μm', encoding='utf-8'), deps=[], obs=[r'Random transverse position error (standard deviation) for <math>x</math> and <math>y</math>.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error alignment quadrupole', group='FAC', is_derived=False, value=si_error_alignment_quadrupole, symbol=r'<math>E_{xy,\text{quad}}</math>', units=unicode('μm', encoding='utf-8'), deps=[], obs=[r'Random transverse position error (standard deviation) for <math>x</math> and <math>y</math>.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error alignment sextupole', group='FAC', is_derived=False, value=si_error_alignment_sextupole, symbol=r'<math>E_{xy,\text{sext}}</math>', units=unicode('μm', encoding='utf-8'), deps=[], obs=[r'Random transverse position error (standard deviation) for <math>x</math> and <math>y</math>.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error roll dipole', group='FAC', is_derived=False, value=si_error_roll_dipole, symbol=r'<math>E_{\theta,\text{dip}}</math>', units='mrad', deps=[], obs=[r'Random rotation error (standard deviation) around longitudinal axis.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error roll quadrupole', group='FAC', is_derived=False, value=si_error_roll_quadrupole, symbol=r'<math>E_{\theta,\text{quad}}</math>', units='mrad', deps=[], obs=[r'Random rotation error (standard deviation) around longitudinal axis.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error roll sextupole', group='FAC', is_derived=False, value=si_error_roll_sextupole, symbol=r'<math>E_{\theta,\text{sext}}</math>', units='mrad', deps=[], obs=[r'Random rotation error (standard deviation) around longitudinal axis.', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error excitation dipole', group='FAC', is_derived=False, value=si_error_excitation_dipole, symbol=r'<math>E_{\text{exc,dip}}</math>', units='%', deps=[], obs=[r'Static or low frequency random excitation error (standard deviation).', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error excitation quadrupole', group='FAC', is_derived=False, value=si_error_excitation_quadrupole, symbol=r'<math>E_{\text{exc,quad}}</math>', units='%', deps=[], obs=[r'Static or low frequency random excitation error (standard deviation).', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error excitation sextupole', group='FAC', is_derived=False, value=si_error_excitation_sextupole, symbol=r'<math>E_{\text{exc,sext}}</math>', units='%', deps=[], obs=[r'Static or low frequency random excitation error (standard deviation).', unicode('Simulations assume Gaussian distribution truncated at ±2σ.', encoding='utf-8')], ),
Parameter(name='SI error ripple dipole', group='FAC', is_derived=False, value=si_error_ripple_dipole, symbol=r'<math>E_\text{ripp, dip}</math>', units='ppm', deps=[], obs=[], ),
Parameter(name='SI error ripple quadrupole', group='FAC', is_derived=False, value=si_error_ripple_quadrupole, symbol=r'<math>E_\text{ripp, quad}</math>', units='ppm', deps=[], obs=[], ),
Parameter(name='SI error ripple sextupole', group='FAC', is_derived=False, value=si_error_ripple_sextupole, symbol=r'<math>E_\text{ripp, sext}</math>', units='ppm', deps=[], obs=[], ),
Parameter(name='SI error vibration dipole', group='FAC', is_derived=False, value=si_error_vibration_dipole, symbol=r'<math>E_\text{vib, dip}</math>', units='nm', deps=[], obs=[], ),
Parameter(name='SI error vibration quadrupole', group='FAC', is_derived=False, value=si_error_vibration_quadrupole, symbol=r'<math>E_\text{vib, quad}</math>', units='nm', deps=[], obs=[], ),
Parameter(name='SI error vibration sextupole', group='FAC', is_derived=False, value=si_error_vibration_sextupole, symbol=r'<math>E_\text{vib, sext}</math>', units='nm', deps=[], obs=[], ),
]
| 178.652632
| 464
| 0.711937
| 10,427
| 67,888
| 4.466673
| 0.033087
| 0.0505
| 0.071499
| 0.080302
| 0.9498
| 0.928029
| 0.871688
| 0.832804
| 0.774016
| 0.673165
| 0
| 0.025309
| 0.096703
| 67,888
| 379
| 465
| 179.124011
| 0.733885
| 0.006349
| 0
| 0
| 0
| 0.270115
| 0.498327
| 0.10538
| 0.017241
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005747
| 0
| 0.005747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b62c28b63adbdb5f6314e0379a097b8e48141fad
| 437
|
py
|
Python
|
templates/database/mongodb/actions.py
|
Jumpscale/ays_jumpscale8
|
4ff4a2fb3b95de6f46ea494bd5b5a2a0fb9ecdb1
|
[
"Apache-2.0"
] | null | null | null |
templates/database/mongodb/actions.py
|
Jumpscale/ays_jumpscale8
|
4ff4a2fb3b95de6f46ea494bd5b5a2a0fb9ecdb1
|
[
"Apache-2.0"
] | 148
|
2016-03-21T08:45:34.000Z
|
2021-09-08T12:28:55.000Z
|
templates/database/mongodb/actions.py
|
Jumpscale/ays_jumpscale8
|
4ff4a2fb3b95de6f46ea494bd5b5a2a0fb9ecdb1
|
[
"Apache-2.0"
] | null | null | null |
def install(job):
cuisine = job.service.executor.cuisine
name = 'mongod_%s' % job.service.name
cuisine.apps.mongodb.install(start=True, name=name)
def start(job):
cuisine = job.service.executor.cuisine
name = 'mongod_%s' % job.service.name
cuisine.apps.mongodb.start(name)
def stop(job):
cuisine = job.service.executor.cuisine
name = 'mongod_%s' % job.service.name
cuisine.apps.mongodb.stop(name)
| 25.705882
| 55
| 0.693364
| 60
| 437
| 5
| 0.233333
| 0.2
| 0.13
| 0.2
| 0.78
| 0.78
| 0.78
| 0.78
| 0.78
| 0.78
| 0
| 0
| 0.169336
| 437
| 16
| 56
| 27.3125
| 0.826446
| 0
| 0
| 0.5
| 0
| 0
| 0.061785
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fcefc9a3927d12c2a793ef1684506178befa7826
| 32,711
|
py
|
Python
|
sdk/python/pulumi_azure/management/group_template_deployment.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/management/group_template_deployment.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/management/group_template_deployment.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['GroupTemplateDeploymentArgs', 'GroupTemplateDeployment']
@pulumi.input_type
class GroupTemplateDeploymentArgs:
def __init__(__self__, *,
management_group_id: pulumi.Input[str],
debug_level: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parameters_content: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
template_content: Optional[pulumi.Input[str]] = None,
template_spec_version_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a GroupTemplateDeployment resource.
:param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
:param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created.
:param pulumi.Input[str] name: The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
:param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template.
:param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
:param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
pulumi.set(__self__, "management_group_id", management_group_id)
if debug_level is not None:
pulumi.set(__self__, "debug_level", debug_level)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if parameters_content is not None:
pulumi.set(__self__, "parameters_content", parameters_content)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if template_content is not None:
pulumi.set(__self__, "template_content", template_content)
if template_spec_version_id is not None:
pulumi.set(__self__, "template_spec_version_id", template_spec_version_id)
@property
@pulumi.getter(name="managementGroupId")
def management_group_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "management_group_id")
@management_group_id.setter
def management_group_id(self, value: pulumi.Input[str]):
pulumi.set(self, "management_group_id", value)
@property
@pulumi.getter(name="debugLevel")
def debug_level(self) -> Optional[pulumi.Input[str]]:
"""
The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
"""
return pulumi.get(self, "debug_level")
@debug_level.setter
def debug_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "debug_level", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Region where the Template should exist. Changing this forces a new Template to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="parametersContent")
def parameters_content(self) -> Optional[pulumi.Input[str]]:
"""
The contents of the ARM Template parameters file - containing a JSON list of parameters.
"""
return pulumi.get(self, "parameters_content")
@parameters_content.setter
def parameters_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parameters_content", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to the Template.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="templateContent")
def template_content(self) -> Optional[pulumi.Input[str]]:
"""
The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
"""
return pulumi.get(self, "template_content")
@template_content.setter
def template_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_content", value)
@property
@pulumi.getter(name="templateSpecVersionId")
def template_spec_version_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
return pulumi.get(self, "template_spec_version_id")
@template_spec_version_id.setter
def template_spec_version_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_spec_version_id", value)
@pulumi.input_type
class _GroupTemplateDeploymentState:
def __init__(__self__, *,
debug_level: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
management_group_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
output_content: Optional[pulumi.Input[str]] = None,
parameters_content: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
template_content: Optional[pulumi.Input[str]] = None,
template_spec_version_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering GroupTemplateDeployment resources.
:param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
:param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created.
:param pulumi.Input[str] name: The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
:param pulumi.Input[str] output_content: The JSON Content of the Outputs of the ARM Template Deployment.
:param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template.
:param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
:param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
if debug_level is not None:
pulumi.set(__self__, "debug_level", debug_level)
if location is not None:
pulumi.set(__self__, "location", location)
if management_group_id is not None:
pulumi.set(__self__, "management_group_id", management_group_id)
if name is not None:
pulumi.set(__self__, "name", name)
if output_content is not None:
pulumi.set(__self__, "output_content", output_content)
if parameters_content is not None:
pulumi.set(__self__, "parameters_content", parameters_content)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if template_content is not None:
pulumi.set(__self__, "template_content", template_content)
if template_spec_version_id is not None:
pulumi.set(__self__, "template_spec_version_id", template_spec_version_id)
@property
@pulumi.getter(name="debugLevel")
def debug_level(self) -> Optional[pulumi.Input[str]]:
"""
The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
"""
return pulumi.get(self, "debug_level")
@debug_level.setter
def debug_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "debug_level", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Region where the Template should exist. Changing this forces a new Template to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="managementGroupId")
def management_group_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "management_group_id")
@management_group_id.setter
def management_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "management_group_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="outputContent")
def output_content(self) -> Optional[pulumi.Input[str]]:
"""
The JSON Content of the Outputs of the ARM Template Deployment.
"""
return pulumi.get(self, "output_content")
@output_content.setter
def output_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "output_content", value)
@property
@pulumi.getter(name="parametersContent")
def parameters_content(self) -> Optional[pulumi.Input[str]]:
"""
The contents of the ARM Template parameters file - containing a JSON list of parameters.
"""
return pulumi.get(self, "parameters_content")
@parameters_content.setter
def parameters_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parameters_content", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to the Template.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="templateContent")
def template_content(self) -> Optional[pulumi.Input[str]]:
"""
The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
"""
return pulumi.get(self, "template_content")
@template_content.setter
def template_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_content", value)
@property
@pulumi.getter(name="templateSpecVersionId")
def template_spec_version_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
return pulumi.get(self, "template_spec_version_id")
@template_spec_version_id.setter
def template_spec_version_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_spec_version_id", value)
class GroupTemplateDeployment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
debug_level: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
management_group_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parameters_content: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
template_content: Optional[pulumi.Input[str]] = None,
template_spec_version_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Template Deployment at a Management Group Scope.
> **Note:** Deleting a Deployment at the Management Group Scope will not delete any resources created by the deployment.
> **Note:** Deployments to a Management Group are always Incrementally applied. Existing resources that are not part of the template will not be removed.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_content=\"\"\"{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"policyAssignmentName": {
"type": "string",
"defaultValue": "[guid(parameters('policyDefinitionID'), resourceGroup().name)]",
"metadata": {
"description": "Specifies the name of the policy assignment, can be used defined or an idempotent name as the defaultValue provides."
}
},
"policyDefinitionID": {
"type": "string",
"metadata": {
"description": "Specifies the ID of the policy definition or policy set definition being assigned."
}
}
},
"resources": [
{
"type": "Microsoft.Authorization/policyAssignments",
"name": "[parameters('policyAssignmentName')]",
"apiVersion": "2019-09-01",
"properties": {
"scope": "[subscriptionResourceId('Microsoft.Resources/resourceGroups', resourceGroup().name)]",
"policyDefinitionId": "[parameters('policyDefinitionID')]"
}
}
]
}
\"\"\",
parameters_content=\"\"\"{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"policyDefinitionID": {
"value": "/providers/Microsoft.Authorization/policyDefinitions/0a914e76-4921-4c19-b460-a2d36003525a"
}
}
}
\"\"\")
```
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_content=(lambda path: open(path).read())("templates/example-deploy-template.json"),
parameters_content=(lambda path: open(path).read())("templates/example-deploy-params.json"))
```
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_template_spec_version = azure.core.get_template_spec_version(name="exampleTemplateForManagementGroup",
resource_group_name="exampleResourceGroup",
version="v1.0.9")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_spec_version_id=example_template_spec_version.id)
```
## Import
Management Group Template Deployments can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:management/groupTemplateDeployment:GroupTemplateDeployment example /providers/Microsoft.Management/managementGroups/my-management-group-id/providers/Microsoft.Resources/deployments/deploy1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
:param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created.
:param pulumi.Input[str] name: The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
:param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template.
:param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
:param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GroupTemplateDeploymentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Template Deployment at a Management Group Scope.
> **Note:** Deleting a Deployment at the Management Group Scope will not delete any resources created by the deployment.
> **Note:** Deployments to a Management Group are always Incrementally applied. Existing resources that are not part of the template will not be removed.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_content=\"\"\"{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"policyAssignmentName": {
"type": "string",
"defaultValue": "[guid(parameters('policyDefinitionID'), resourceGroup().name)]",
"metadata": {
"description": "Specifies the name of the policy assignment, can be used defined or an idempotent name as the defaultValue provides."
}
},
"policyDefinitionID": {
"type": "string",
"metadata": {
"description": "Specifies the ID of the policy definition or policy set definition being assigned."
}
}
},
"resources": [
{
"type": "Microsoft.Authorization/policyAssignments",
"name": "[parameters('policyAssignmentName')]",
"apiVersion": "2019-09-01",
"properties": {
"scope": "[subscriptionResourceId('Microsoft.Resources/resourceGroups', resourceGroup().name)]",
"policyDefinitionId": "[parameters('policyDefinitionID')]"
}
}
]
}
\"\"\",
parameters_content=\"\"\"{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"policyDefinitionID": {
"value": "/providers/Microsoft.Authorization/policyDefinitions/0a914e76-4921-4c19-b460-a2d36003525a"
}
}
}
\"\"\")
```
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_content=(lambda path: open(path).read())("templates/example-deploy-template.json"),
parameters_content=(lambda path: open(path).read())("templates/example-deploy-params.json"))
```
```python
import pulumi
import pulumi_azure as azure
example_group = azure.management.get_group(name="00000000-0000-0000-0000-000000000000")
example_template_spec_version = azure.core.get_template_spec_version(name="exampleTemplateForManagementGroup",
resource_group_name="exampleResourceGroup",
version="v1.0.9")
example_group_template_deployment = azure.management.GroupTemplateDeployment("exampleGroupTemplateDeployment",
location="West Europe",
management_group_id=example_group.id,
template_spec_version_id=example_template_spec_version.id)
```
## Import
Management Group Template Deployments can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:management/groupTemplateDeployment:GroupTemplateDeployment example /providers/Microsoft.Management/managementGroups/my-management-group-id/providers/Microsoft.Resources/deployments/deploy1
```
:param str resource_name: The name of the resource.
:param GroupTemplateDeploymentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GroupTemplateDeploymentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
debug_level: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
management_group_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parameters_content: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
template_content: Optional[pulumi.Input[str]] = None,
template_spec_version_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GroupTemplateDeploymentArgs.__new__(GroupTemplateDeploymentArgs)
__props__.__dict__["debug_level"] = debug_level
__props__.__dict__["location"] = location
if management_group_id is None and not opts.urn:
raise TypeError("Missing required property 'management_group_id'")
__props__.__dict__["management_group_id"] = management_group_id
__props__.__dict__["name"] = name
__props__.__dict__["parameters_content"] = parameters_content
__props__.__dict__["tags"] = tags
__props__.__dict__["template_content"] = template_content
__props__.__dict__["template_spec_version_id"] = template_spec_version_id
__props__.__dict__["output_content"] = None
super(GroupTemplateDeployment, __self__).__init__(
'azure:management/groupTemplateDeployment:GroupTemplateDeployment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
debug_level: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
management_group_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
output_content: Optional[pulumi.Input[str]] = None,
parameters_content: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
template_content: Optional[pulumi.Input[str]] = None,
template_spec_version_id: Optional[pulumi.Input[str]] = None) -> 'GroupTemplateDeployment':
"""
Get an existing GroupTemplateDeployment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] debug_level: The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
:param pulumi.Input[str] location: The Azure Region where the Template should exist. Changing this forces a new Template to be created.
:param pulumi.Input[str] name: The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
:param pulumi.Input[str] output_content: The JSON Content of the Outputs of the ARM Template Deployment.
:param pulumi.Input[str] parameters_content: The contents of the ARM Template parameters file - containing a JSON list of parameters.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Template.
:param pulumi.Input[str] template_content: The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
:param pulumi.Input[str] template_spec_version_id: The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GroupTemplateDeploymentState.__new__(_GroupTemplateDeploymentState)
__props__.__dict__["debug_level"] = debug_level
__props__.__dict__["location"] = location
__props__.__dict__["management_group_id"] = management_group_id
__props__.__dict__["name"] = name
__props__.__dict__["output_content"] = output_content
__props__.__dict__["parameters_content"] = parameters_content
__props__.__dict__["tags"] = tags
__props__.__dict__["template_content"] = template_content
__props__.__dict__["template_spec_version_id"] = template_spec_version_id
return GroupTemplateDeployment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="debugLevel")
def debug_level(self) -> pulumi.Output[Optional[str]]:
"""
The Debug Level which should be used for this Resource Group Template Deployment. Possible values are `none`, `requestContent`, `responseContent` and `requestContent, responseContent`.
"""
return pulumi.get(self, "debug_level")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The Azure Region where the Template should exist. Changing this forces a new Template to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="managementGroupId")
def management_group_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "management_group_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this Template Deployment. Changing this forces a new Template Deployment to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outputContent")
def output_content(self) -> pulumi.Output[str]:
"""
The JSON Content of the Outputs of the ARM Template Deployment.
"""
return pulumi.get(self, "output_content")
@property
@pulumi.getter(name="parametersContent")
def parameters_content(self) -> pulumi.Output[str]:
"""
The contents of the ARM Template parameters file - containing a JSON list of parameters.
"""
return pulumi.get(self, "parameters_content")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags which should be assigned to the Template.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="templateContent")
def template_content(self) -> pulumi.Output[str]:
"""
The contents of the ARM Template which should be deployed into this Resource Group. Cannot be specified with `template_spec_version_id`.
"""
return pulumi.get(self, "template_content")
@property
@pulumi.getter(name="templateSpecVersionId")
def template_spec_version_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the Template Spec Version to deploy. Cannot be specified with `template_content`.
"""
return pulumi.get(self, "template_spec_version_id")
| 48.460741
| 230
| 0.662652
| 3,666
| 32,711
| 5.712493
| 0.068467
| 0.064607
| 0.072199
| 0.067233
| 0.905501
| 0.893802
| 0.885971
| 0.877662
| 0.873365
| 0.861666
| 0
| 0.012639
| 0.238085
| 32,711
| 674
| 231
| 48.532641
| 0.827629
| 0.460671
| 0
| 0.788274
| 1
| 0
| 0.105691
| 0.028388
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162866
| false
| 0.003257
| 0.016287
| 0.009772
| 0.276873
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1e00c3c55f9e1252d7ce9a97de1b7b06ffdc307d
| 6,081
|
py
|
Python
|
modules/tryton.py
|
dozymoe/fireh_runner
|
e431ff64d4c1952ee1ddfa113abce3c0447bc3cd
|
[
"MIT"
] | 2
|
2017-07-11T09:23:36.000Z
|
2018-05-16T02:26:51.000Z
|
modules/tryton.py
|
dozymoe/fireh_runner
|
e431ff64d4c1952ee1ddfa113abce3c0447bc3cd
|
[
"MIT"
] | null | null | null |
modules/tryton.py
|
dozymoe/fireh_runner
|
e431ff64d4c1952ee1ddfa113abce3c0447bc3cd
|
[
"MIT"
] | 1
|
2018-04-04T14:52:02.000Z
|
2018-04-04T14:52:02.000Z
|
""" Tryton module.
Tryton is a three-tier high-level general purpose application platform under
the license GPL-3 written in Python and using PostgreSQL as database engine.
It is the core base of a complete business solution providing modularity,
scalability and security.
Website: http://www.tryton.org
"""
import os
SHELL_TIMEOUT = None
def trytond(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
project, variant = loader.setup_project_env(project, variant)
loader.setup_virtualenv()
loader.setup_shell_env()
config = loader.get_project_config()
python_bin = loader.get_python_bin()
database_name = config.get('tryton.database.name', project)
config_file = config.get('tryton.config_file')
if config_file:
config_file = os.path.join(loader.config['work_dir'],
config_file)
logging_config_file = config.get('tryton.logging.config_file')
if logging_config_file:
logging_config_file = os.path.join(loader.config['work_dir'],
logging_config_file)
work_dir = config.get('work_dir', project)
work_dir = loader.expand_path(work_dir)
bindir = config.get('trytond.bin_dir')
if bindir:
binargs = [python_bin, os.path.join(loader.config['work_dir'], bindir,
'trytond')]
else:
binargs = loader.get_binargs('trytond')
if config_file:
binargs += ['--config', config_file]
if database_name:
binargs += ['--database', database_name]
if logging_config_file:
binargs += ['--logconf', logging_config_file]
binargs += list(args)
os.chdir(work_dir)
os.execvp(binargs[0], binargs)
def trytond_admin(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
    """Exec trytond-admin for the selected project/variant.

    Replaces the current process via os.execvp, so this never returns.

    Bug fix: the join of ``config_file`` onto ``work_dir`` is now guarded by
    ``if config_file:``. Previously the join ran unconditionally, so when
    'tryton.config_file' was unset ``os.path.join(..., None)`` raised
    TypeError. All sibling commands in this module already guard the join.
    """
    project, variant = loader.setup_project_env(project, variant)
    loader.setup_virtualenv()
    loader.setup_shell_env()
    config = loader.get_project_config()
    python_bin = loader.get_python_bin()
    database_name = config.get('tryton.database.name', project)
    config_file = config.get('tryton.config_file')
    # Only resolve the path when a config file is configured; the '--config'
    # flag below is skipped when it is unset.
    if config_file:
        config_file = os.path.join(loader.config['work_dir'],
                config_file)
    work_dir = config.get('work_dir', project)
    work_dir = loader.expand_path(work_dir)
    bindir = config.get('trytond.bin_dir')
    if bindir:
        binargs = [python_bin, os.path.join(loader.config['work_dir'], bindir,
                'trytond-admin')]
    else:
        binargs = loader.get_binargs('trytond-admin')
    if config_file:
        binargs += ['--config', config_file]
    if database_name:
        binargs += ['--database', database_name]
    binargs += list(args)
    os.chdir(work_dir)
    os.execvp(binargs[0], binargs)
def trytond_cron(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
    """Exec the trytond-cron scheduler for the selected project/variant.

    Replaces the current process via os.execvp, so this never returns.
    """
    project, variant = loader.setup_project_env(project, variant)
    loader.setup_virtualenv()
    loader.setup_shell_env()

    config = loader.get_project_config()
    python_bin = loader.get_python_bin()

    database_name = config.get('tryton.database.name', project)
    config_file = config.get('tryton.config_file')
    if config_file:
        config_file = os.path.join(loader.config['work_dir'], config_file)

    work_dir = loader.expand_path(config.get('work_dir', project))

    # Prefer a configured source checkout over the installed entry point.
    bindir = config.get('trytond.bin_dir')
    if bindir:
        binargs = [python_bin,
                os.path.join(loader.config['work_dir'], bindir,
                'trytond-cron')]
    else:
        binargs = loader.get_binargs('trytond-cron')

    for flag, value in (('--config', config_file),
            ('--database', database_name)):
        if value:
            binargs += [flag, value]
    binargs.extend(args)

    os.chdir(work_dir)
    os.execvp(binargs[0], binargs)
def trytond_worker(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
    """Exec the trytond-worker task runner for the selected project/variant.

    Replaces the current process via os.execvp, so this never returns.
    """
    project, variant = loader.setup_project_env(project, variant)
    loader.setup_virtualenv()
    loader.setup_shell_env()

    config = loader.get_project_config()
    python_bin = loader.get_python_bin()

    database_name = config.get('tryton.database.name', project)
    config_file = config.get('tryton.config_file')
    if config_file:
        config_file = os.path.join(loader.config['work_dir'], config_file)

    work_dir = loader.expand_path(config.get('work_dir', project))

    # Prefer a configured source checkout over the installed entry point.
    bindir = config.get('trytond.bin_dir')
    if bindir:
        binargs = [python_bin,
                os.path.join(loader.config['work_dir'], bindir,
                'trytond-worker')]
    else:
        binargs = loader.get_binargs('trytond-worker')

    for flag, value in (('--config', config_file),
            ('--database', database_name)):
        if value:
            binargs += [flag, value]
    binargs.extend(args)

    os.chdir(work_dir)
    os.execvp(binargs[0], binargs)
def tryton(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
    """Exec the tryton desktop client for the selected project/variant.

    Replaces the current process via os.execvp, so this never returns.
    """
    project, variant = loader.setup_project_env(project, variant)
    loader.setup_virtualenv()
    loader.setup_shell_env()

    config = loader.get_project_config()
    python_bin = loader.get_python_bin()

    database_name = config.get('tryton.database.name', project)
    config_file = config.get('tryton.config_file')
    if config_file:
        config_file = os.path.join(loader.config['work_dir'], config_file)

    work_dir = loader.expand_path(config.get('work_dir', project))

    # Note: the client reads 'tryton.bin_dir' (not 'trytond.bin_dir').
    bindir = config.get('tryton.bin_dir')
    if bindir:
        binargs = [python_bin,
                os.path.join(loader.config['work_dir'], bindir, 'tryton')]
    else:
        binargs = loader.get_binargs('tryton')

    for flag, value in (('--config', config_file),
            ('--database', database_name)):
        if value:
            binargs += [flag, value]
    binargs.extend(args)

    os.chdir(work_dir)
    os.execvp(binargs[0], binargs)
# Sub-commands this module exposes — presumably collected by the fireh_runner
# loader; confirm against the runner's module-discovery code.
commands = (trytond, trytond_admin, trytond_cron, trytond_worker, tryton)
| 33.229508
| 104
| 0.675382
| 777
| 6,081
| 5.060489
| 0.109395
| 0.104273
| 0.045778
| 0.044761
| 0.875636
| 0.856307
| 0.821719
| 0.821719
| 0.821719
| 0.811801
| 0
| 0.001234
| 0.200132
| 6,081
| 182
| 105
| 33.412088
| 0.807155
| 0.082717
| 0
| 0.833333
| 0
| 0
| 0.11157
| 0.004671
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036232
| false
| 0
| 0.007246
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e22b658c2cc77396c69500bafece5ee42e65bf9
| 197
|
py
|
Python
|
tools/accuracy_checker/accuracy_checker/pipeline_connectors/__init__.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | null | null | null |
tools/accuracy_checker/accuracy_checker/pipeline_connectors/__init__.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | null | null | null |
tools/accuracy_checker/accuracy_checker/pipeline_connectors/__init__.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | null | null | null |
from .connectors import Connection, StageConnectionDescription, create_connection_description
# Public API of this package: the names re-exported from .connectors.
__all__ = [
    'Connection',
    'StageConnectionDescription',
    'create_connection_description'
]
| 24.625
| 93
| 0.796954
| 14
| 197
| 10.642857
| 0.571429
| 0.483221
| 0.563758
| 0.697987
| 0.845638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13198
| 197
| 7
| 94
| 28.142857
| 0.871345
| 0
| 0
| 0
| 0
| 0
| 0.329949
| 0.279188
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e67557b2a7833cf4bdee0226d0f0729c2941dd3
| 7,516
|
py
|
Python
|
userbot/modules/imagefun.py
|
TAMILVIP007/javes-3.0
|
d9238785fa2d79740bbb526aca92455dbccb3838
|
[
"MIT"
] | null | null | null |
userbot/modules/imagefun.py
|
TAMILVIP007/javes-3.0
|
d9238785fa2d79740bbb526aca92455dbccb3838
|
[
"MIT"
] | null | null | null |
userbot/modules/imagefun.py
|
TAMILVIP007/javes-3.0
|
d9238785fa2d79740bbb526aca92455dbccb3838
|
[
"MIT"
] | null | null | null |
# DARKCOBRA ORIGINAL
# by @danish_00
# by #team dc
# Kangers Keep Credits
# Kepp Credits
import cv2
# by @danish_00
import os, scipy, sys, shutil
import numpy as np
import requests, re
from PIL import Image
from telegraph import upload_file
from telethon.tl.types import MessageMediaPhoto
from userbot import bot, CMD_HELP
from userbot.utils import admin_cmd
from userbot import bot as borg
#keep CREDIT LINES ELSE GET LOST
path = "./cv2/"
if not os.path.isdir(path):
os.makedirs(path)
@bot.on(admin_cmd("grey"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imwrite("danish.jpg", gray)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
# DARKCOBRA ORIGINAL
# by @danish_00
# by #team dc
# Kangers Keep Credits
# Kepp Credits
@bot.on(admin_cmd("blr"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
blur = cv2.GaussianBlur(frame, (35, 35), 0)
cv2.imwrite("danish.jpg", blur)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("invrt"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
invert = cv2.bitwise_not(frame)
cv2.imwrite("danish.jpg", invert)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("enhance"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
dtl = cv2.detailEnhance(frame, sigma_s=10, sigma_r=0.15)
cv2.imwrite("danish.jpg", dtl)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("smooth"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
smooth = cv2.edgePreservingFilter(frame, flags=1, sigma_s=60, sigma_r=0.4)
cv2.imwrite("danish.jpg", smooth)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("pencil"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
grey = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(grey, (3,3), 0)
output = cv2.Laplacian(blur, -1, ksize=5)
output = 255 - output
ret, output = cv2.threshold(output, 150, 255, cv2.THRESH_BINARY)
cv2.imwrite("danish.jpg", output)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("imgrey"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
invert = cv2.bitwise_not(frame)
gray = cv2.cvtColor(invert, cv2.COLOR_BGR2GRAY)
cv2.imwrite("danish.jpg", gray)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("emboss"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
kernel = np.array([[0,-1,-1],[1,0,-1], [1,1,0]])
emboss = cv2.filter2D(frame, -1, kernel)
cv2.imwrite("danish.jpg", emboss)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("shrp"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
kernel = np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
sharp = cv2.filter2D(frame, -1, kernel)
cv2.imwrite("danish.jpg", sharp)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
@bot.on(admin_cmd("light"))
async def hehe(event):
if not event.reply_to_msg_id:
await event.reply("Reply to any Image.")
return
reply = await event.get_reply_message()
await event.edit('`Processing...`')
image = await bot.download_media(reply.media, path)
img = cv2.VideoCapture(image)
ret, frame = img.read()
brt = cv2.convertScaleAbs(frame, beta=90)
cv2.imwrite("danish.jpg", brt)
await event.client.send_file(event.chat_id, "danish.jpg", force_document=False, reply_to=event.reply_to_msg_id)
await event.delete()
shutil.rmtree(path)
os.remove("danish.jpg")
# DARKCOBRA ORIGINAL
# by @danish_00
# by #team dc
# Kangers Keep Credits
# Kepp Credits
| 31.579832
| 115
| 0.669638
| 1,087
| 7,516
| 4.485741
| 0.122355
| 0.102543
| 0.049221
| 0.061526
| 0.82219
| 0.820345
| 0.8105
| 0.8105
| 0.8105
| 0.793273
| 0
| 0.01721
| 0.195982
| 7,516
| 237
| 116
| 31.71308
| 0.789674
| 0.037387
| 0
| 0.744444
| 0
| 0
| 0.09681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94ae857f75237c0ab8f2f550f27da36525803b04
| 220
|
py
|
Python
|
torchsso/utils/__init__.py
|
jjxu217/pytorch-sso
|
124954a5588120885e2f017c99db7fc540d5b9ab
|
[
"MIT"
] | 121
|
2019-10-10T16:09:52.000Z
|
2022-03-03T01:28:12.000Z
|
torchsso/utils/__init__.py
|
jjxu217/pytorch-sso
|
124954a5588120885e2f017c99db7fc540d5b9ab
|
[
"MIT"
] | 3
|
2020-02-27T23:03:46.000Z
|
2022-02-18T07:08:23.000Z
|
torchsso/utils/__init__.py
|
jjxu217/pytorch-sso
|
124954a5588120885e2f017c99db7fc540d5b9ab
|
[
"MIT"
] | 20
|
2019-10-18T02:10:21.000Z
|
2022-01-17T19:28:58.000Z
|
from torchsso.utils.logger import Logger # NOQA
from torchsso.utils.inv_cupy import inv # NOQA
from torchsso.utils.cholesky_cupy import cholesky # NOQA
from torchsso.utils.accumulator import TensorAccumulator # NOQA
| 44
| 64
| 0.818182
| 30
| 220
| 5.933333
| 0.366667
| 0.269663
| 0.382022
| 0.353933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 220
| 4
| 65
| 55
| 0.927083
| 0.086364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bf7cf1e61c50548a27756b39391577ad803f5081
| 8,614
|
py
|
Python
|
tests/producer/test_apireaders.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
tests/producer/test_apireaders.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
tests/producer/test_apireaders.py
|
jjaakola/bang-a-gong
|
d30f889c18eeaff3d62d47cd02e93516e4d24dd7
|
[
"MIT"
] | null | null | null |
import asyncio
import time
import unittest
from aioresponses import CallbackResult, aioresponses
from api_status_monitor.producer.apireaders import create_reader
# Canned worldclockapi.com JSON payload used by the mocked responses below;
# the tests extract $.currentDateTime from it.
WORLD_CLOCK_RESPONSE_STRING = '{"$id":"1","currentDateTime":"2021-05-12T20:23Z","utcOffset":"00:00:00","isDayLightSavingsTime":false,"dayOfTheWeek":"Wednesday","timeZoneName":"UTC","currentFileTime":132653246052918702,"ordinalDate":"2021-132","serviceResponse":null}'
class TestWorldClockReader(unittest.IsolatedAsyncioTestCase):
    """Tests for the JSON reader built by create_reader, against a mocked
    worldclockapi.com endpoint.

    Changes vs. original: the unused ``loop = asyncio.get_event_loop()``
    line in every test was removed (dead local; also deprecated outside a
    running loop), and the duplicated create_reader(...) call was extracted
    into the private ``_make_reader`` helper.
    """

    URL = "http://worldclockapi.com/api/json/utc/now"

    def _make_reader(self):
        # Single reader configuration shared by every test in this class.
        return create_reader(name="worldclock", type="json", sla_ms=100,
                             site="http://worldclockapi.com",
                             endpoint="/api/json/utc/now",
                             json_path="$.currentDateTime")

    @aioresponses()
    async def test_read(self, mocked):
        """200 + valid JSON: success, datetime extracted, within SLA."""
        mocked.get(self.URL, status=200, body=WORLD_CLOCK_RESPONSE_STRING)
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertTrue(s.success())
        self.assertEqual("2021-05-12T20:23Z", s.log)
        self.assertEqual("", s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_over_sla(self, mocked):
        """A slow response (0.5s) must be reported as outside the 100ms SLA."""
        def request_callback(url, **kwargs):
            # burn some time for sla to fail
            time.sleep(0.5)
            headers = {'content-type': 'application/json'}
            return CallbackResult(status=200, body=WORLD_CLOCK_RESPONSE_STRING,
                                  headers=headers)
        mocked.get(self.URL, callback=request_callback)
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertTrue(s.success())
        self.assertEqual("2021-05-12T20:23Z", s.log)
        self.assertEqual("", s.error)
        self.assertFalse(s.in_sla())

    @aioresponses()
    async def test_read_error_response(self, mocked):
        """HTTP 500 with an empty body: failure with a JSON parse error."""
        mocked.get(self.URL, status=500, body="")
        s = await self._make_reader().read()
        self.assertEqual(500, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual("(worldclock) JSON parse error.", s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_read_invalid_json_response(self, mocked):
        """Valid JSON without the expected key: jsonpath extraction fails."""
        mocked.get(self.URL, status=200, body="{}")
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual(
            "(worldclock) Could not extract log with jsonpath '$.currentDateTime'.",
            s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_read_error(self, mocked):
        """A transport-level exception surfaces as status 0 + its message."""
        mocked.get(self.URL, exception=Exception("error"))
        s = await self._make_reader().read()
        self.assertEqual(0, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual("error", s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_read_timeout_error(self, mocked):
        """An asyncio timeout is mapped to the dedicated TIMEOUT error."""
        mocked.get(self.URL, exception=asyncio.exceptions.TimeoutError)
        s = await self._make_reader().read()
        self.assertEqual(0, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual("TIMEOUT", s.error)
        self.assertTrue(s.in_sla())
JPX_RESPONSE_STRING = """
<html>
<title>Test title</title>
<body>
</body>
</html>
"""
class JapanStockExchangeReaderTest(unittest.IsolatedAsyncioTestCase):
    """Tests for the regex reader against a mocked www.jpx.co.jp.

    Changes vs. original: the regex literal is now a raw string (the original
    non-raw literal relied on invalid escape sequences surviving unchanged,
    which emits a DeprecationWarning; the pattern bytes are identical), the
    unused ``loop = asyncio.get_event_loop()`` lines were removed, and the
    duplicated create_reader(...) call was extracted into a helper.
    """

    def _make_reader(self):
        # Raw string: same pattern as before, without invalid-escape warnings.
        return create_reader(name="jpx", type="regex", sla_ms=100,
                             site="https://www.jpx.co.jp",
                             endpoint="/",
                             regex=r"<title>([\w\s]*)</title>")

    @aioresponses()
    async def test_read(self, mocked):
        """The page <title> is extracted from a successful response."""
        mocked.get("https://www.jpx.co.jp",
                   status=200, body=JPX_RESPONSE_STRING)
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertTrue(s.success())
        self.assertEqual("Test title", s.log)
        self.assertEqual("", s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_read_no_title(self, mocked):
        """A body without <title> yields a descriptive extraction error."""
        mocked.get("https://www.jpx.co.jp",
                   status=200, body="No title in the response")
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual("(jpx) Could not extract log with regex '<title>([\\w\\s]*)</title>'.",
                         s.error)
        self.assertTrue(s.in_sla())
# Canned aiven.io landing-page text; the regex reader tests extract the
# "Freedom to build awesome applications" tagline from it.
AIVEN_RESPONSE_STRING = """
Freedom to build awesome applications
Aiven manages your open source data infrastructure in the cloud - so you don't have to.
"""
class AivenRootReaderTest(unittest.IsolatedAsyncioTestCase):
    """Tests for the regex reader against a mocked aiven.io root page.

    Changes vs. original: the unused ``loop = asyncio.get_event_loop()``
    lines were removed and the duplicated create_reader(...) call was
    extracted into the private ``_make_reader`` helper.
    """

    def _make_reader(self):
        return create_reader(name="aiven", type="regex", sla_ms=100,
                             site="https://aiven.io",
                             endpoint="/",
                             regex="(Freedom to build awesome applications)")

    @aioresponses()
    async def test_read(self, mocked):
        """The tagline is extracted from a successful response."""
        mocked.get("https://aiven.io",
                   status=200, body=AIVEN_RESPONSE_STRING)
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertTrue(s.success())
        self.assertEqual("Freedom to build awesome applications", s.log)
        self.assertEqual("", s.error)
        self.assertTrue(s.in_sla())

    @aioresponses()
    async def test_read_no_awesomeness(self, mocked):
        """A body without the tagline yields a descriptive extraction error."""
        mocked.get("https://aiven.io",
                   status=200, body="No awesomeness in the response")
        s = await self._make_reader().read()
        self.assertEqual(200, s.status_code)
        self.assertFalse(s.success())
        self.assertEqual("", s.log)
        self.assertEqual("(aiven) Could not extract log with regex '(Freedom to build awesome applications)'.",
                         s.error)
        self.assertTrue(s.in_sla())
| 38.455357
| 267
| 0.568261
| 929
| 8,614
| 5.149623
| 0.148547
| 0.094064
| 0.040761
| 0.032609
| 0.809156
| 0.797659
| 0.772157
| 0.729724
| 0.729724
| 0.721572
| 0
| 0.024789
| 0.297539
| 8,614
| 223
| 268
| 38.627803
| 0.765824
| 0.003483
| 0
| 0.715084
| 0
| 0.005587
| 0.214752
| 0.036355
| 0
| 0
| 0
| 0
| 0.27933
| 1
| 0.005587
| false
| 0
| 0.027933
| 0
| 0.055866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44e0dd6246c9f20087460a59472dc06da940672d
| 4,394
|
py
|
Python
|
test/unit/test_vecod_data_shims.py
|
VecoOfficial/sentinel
|
a25583061902d3ceeec128f84f8978a7c729a93c
|
[
"MIT"
] | null | null | null |
test/unit/test_vecod_data_shims.py
|
VecoOfficial/sentinel
|
a25583061902d3ceeec128f84f8978a7c729a93c
|
[
"MIT"
] | null | null | null |
test/unit/test_vecod_data_shims.py
|
VecoOfficial/sentinel
|
a25583061902d3ceeec128f84f8978a7c729a93c
|
[
"MIT"
] | 2
|
2019-10-18T19:58:30.000Z
|
2019-10-20T19:16:44.000Z
|
import pytest
import sys
import os
# Must run before `import vecolib` below: point sentinel at the test config
# and make the project's lib/ directory importable.
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
import vecolib
@pytest.fixture
def sentinel_proposal_hex():
    """Hex-encoded proposal payload in the sentinel serialisation (single
    outer JSON list — the hex starts with 5b22, i.e. '["...')."""
    payload = '5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313032323830302c20226e616d65223a2022626565722d7265696d62757273656d656e742d37222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a20372e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202275726c223a202268747470733a2f2f6461736863656e7472616c2e636f6d2f626565722d7265696d62757273656d656e742d37227d5d'
    return payload
@pytest.fixture
def sentinel_superblock_hex():
    """Hex-encoded superblock payload in the sentinel serialisation."""
    payload = '5b227375706572626c6f636b222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33227d5d'
    return payload
@pytest.fixture
def vecod_proposal_hex():
    """Hex-encoded proposal payload in the vecod wire form (extra outer JSON
    list — the hex starts with 5b5b, i.e. '[["...')."""
    payload = '5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313336383430302c20226e616d65223a2022626565722d7265696d62757273656d656e742d39222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202274797065223a20312c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f702f626565722d7265696d62757273656d656e742d39227d5d5d'
    return payload
@pytest.fixture
def vecod_superblock_hex():
    """Hex-encoded superblock payload in the vecod wire form."""
    payload = '5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d5d5d'
    return payload
# ========================================================================
def test_SHIM_deserialise_from_vecod(vecod_proposal_hex, vecod_superblock_hex):
    """Deserialising the vecod wire form ('[[' outer nesting, hex 5b5b...)
    must yield the sentinel form ('[' outer nesting, hex 5b...)."""
    expected_proposal = '5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313336383430302c20226e616d65223a2022626565722d7265696d62757273656d656e742d39222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f702f626565722d7265696d62757273656d656e742d39227d5d'
    expected_superblock = '5b227375706572626c6f636b222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33227d5d'
    assert vecolib.SHIM_deserialise_from_vecod(vecod_proposal_hex) == expected_proposal
    assert vecolib.SHIM_deserialise_from_vecod(vecod_superblock_hex) == expected_superblock
def test_SHIM_serialise_for_vecod(sentinel_proposal_hex, sentinel_superblock_hex):
    """Serialising the sentinel form must produce the vecod wire form
    (extra outer list nesting, hex 5b5b...)."""
    expected_proposal = '5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313032323830302c20226e616d65223a2022626565722d7265696d62757273656d656e742d37222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a20372e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202274797065223a20312c202275726c223a202268747470733a2f2f6461736863656e7472616c2e636f6d2f626565722d7265696d62757273656d656e742d37227d5d5d'
    expected_superblock = '5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d5d5d'
    assert vecolib.SHIM_serialise_for_vecod(sentinel_proposal_hex) == expected_proposal
    assert vecolib.SHIM_serialise_for_vecod(sentinel_superblock_hex) == expected_superblock
| 112.666667
| 578
| 0.934001
| 135
| 4,394
| 30
| 0.266667
| 0.008889
| 0.015802
| 0.017778
| 0.093333
| 0.086914
| 0.086914
| 0.020247
| 0.020247
| 0.020247
| 0
| 0.69168
| 0.023441
| 4,394
| 38
| 579
| 115.631579
| 0.252156
| 0.016386
| 0
| 0.166667
| 0
| 0
| 0.788194
| 0.782639
| 0
| 1
| 0
| 0
| 0.166667
| 1
| 0.25
| false
| 0
| 0.166667
| 0.166667
| 0.583333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
44e3908bf5a456cba7904fdb4b547526fee2e160
| 12,642
|
py
|
Python
|
src/ufc-2.0.5/src/utils/python/ufc_utils/finite_element.py
|
szmurlor/fiver
|
083251420eb934d860c99dcf1eb07ae5b8ba7e8c
|
[
"Apache-2.0"
] | null | null | null |
src/ufc-2.0.5/src/utils/python/ufc_utils/finite_element.py
|
szmurlor/fiver
|
083251420eb934d860c99dcf1eb07ae5b8ba7e8c
|
[
"Apache-2.0"
] | null | null | null |
src/ufc-2.0.5/src/utils/python/ufc_utils/finite_element.py
|
szmurlor/fiver
|
083251420eb934d860c99dcf1eb07ae5b8ba7e8c
|
[
"Apache-2.0"
] | null | null | null |
# Code generation format strings for UFC (Unified Form-assembly Code) v. 2.0.5.
# This code is released into the public domain.
#
# The FEniCS Project (http://www.fenicsproject.org/) 2006-2011.
finite_element_combined = """\
/// This class defines the interface for a finite element.
class %(classname)s: public ufc::finite_element
{%(members)s
public:
/// Constructor
%(classname)s(%(constructor_arguments)s) : ufc::finite_element()%(initializer_list)s
{
%(constructor)s
}
/// Destructor
virtual ~%(classname)s()
{
%(destructor)s
}
/// Return a string identifying the finite element
virtual const char* signature() const
{
%(signature)s
}
/// Return the cell shape
virtual ufc::shape cell_shape() const
{
%(cell_shape)s
}
/// Return the topological dimension of the cell shape
virtual unsigned int topological_dimension() const
{
%(topological_dimension)s
}
/// Return the geometric dimension of the cell shape
virtual unsigned int geometric_dimension() const
{
%(geometric_dimension)s
}
/// Return the dimension of the finite element function space
virtual unsigned int space_dimension() const
{
%(space_dimension)s
}
/// Return the rank of the value space
virtual unsigned int value_rank() const
{
%(value_rank)s
}
/// Return the dimension of the value space for axis i
virtual unsigned int value_dimension(unsigned int i) const
{
%(value_dimension)s
}
/// Evaluate basis function i at given point in cell
virtual void evaluate_basis(unsigned int i,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis)s
}
/// Evaluate all basis functions at given point in cell
virtual void evaluate_basis_all(double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_all)s
}
/// Evaluate order n derivatives of basis function i at given point in cell
virtual void evaluate_basis_derivatives(unsigned int i,
unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_derivatives)s
}
/// Evaluate order n derivatives of all basis functions at given point in cell
virtual void evaluate_basis_derivatives_all(unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_derivatives_all)s
}
/// Evaluate linear functional for dof i on the function f
virtual double evaluate_dof(unsigned int i,
const ufc::function& f,
const ufc::cell& c) const
{
%(evaluate_dof)s
}
/// Evaluate linear functionals for all dofs on the function f
virtual void evaluate_dofs(double* values,
const ufc::function& f,
const ufc::cell& c) const
{
%(evaluate_dofs)s
}
/// Interpolate vertex values from dof values
virtual void interpolate_vertex_values(double* vertex_values,
const double* dof_values,
const ufc::cell& c) const
{
%(interpolate_vertex_values)s
}
/// Map coordinate xhat from reference cell to coordinate x in cell
virtual void map_from_reference_cell(double* x,
const double* xhat,
const ufc::cell& c) const
{
%(map_from_reference_cell)s
}
/// Map from coordinate x in cell to coordinate xhat in reference cell
virtual void map_to_reference_cell(double* xhat,
const double* x,
const ufc::cell& c) const
{
%(map_to_reference_cell)s
}
/// Return the number of sub elements (for a mixed element)
virtual unsigned int num_sub_elements() const
{
%(num_sub_elements)s
}
/// Create a new finite element for sub element i (for a mixed element)
virtual ufc::finite_element* create_sub_element(unsigned int i) const
{
%(create_sub_element)s
}
/// Create a new class instance
virtual ufc::finite_element* create() const
{
%(create)s
}
};
"""
finite_element_header = """\
/// This class defines the interface for a finite element.
class %(classname)s: public ufc::finite_element
{%(members)s
public:
/// Constructor
%(classname)s(%(constructor_arguments)s);
/// Destructor
virtual ~%(classname)s();
/// Return a string identifying the finite element
virtual const char* signature() const;
/// Return the cell shape
virtual ufc::shape cell_shape() const;
/// Return the topological dimension of the cell shape
virtual unsigned int topological_dimension() const;
/// Return the geometric dimension of the cell shape
virtual unsigned int geometric_dimension() const;
/// Return the dimension of the finite element function space
virtual unsigned int space_dimension() const;
/// Return the rank of the value space
virtual unsigned int value_rank() const;
/// Return the dimension of the value space for axis i
virtual unsigned int value_dimension(unsigned int i) const;
/// Evaluate basis function i at given point in cell
virtual void evaluate_basis(unsigned int i,
double* values,
const double* coordinates,
const ufc::cell& c) const;
/// Evaluate all basis functions at given point in cell
virtual void evaluate_basis_all(double* values,
const double* coordinates,
const ufc::cell& c) const;
/// Evaluate order n derivatives of basis function i at given point in cell
virtual void evaluate_basis_derivatives(unsigned int i,
unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const;
/// Evaluate order n derivatives of all basis functions at given point in cell
virtual void evaluate_basis_derivatives_all(unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const;
/// Evaluate linear functional for dof i on the function f
virtual double evaluate_dof(unsigned int i,
const ufc::function& f,
const ufc::cell& c) const;
/// Evaluate linear functionals for all dofs on the function f
virtual void evaluate_dofs(double* values,
const ufc::function& f,
const ufc::cell& c) const;
/// Interpolate vertex values from dof values
virtual void interpolate_vertex_values(double* vertex_values,
const double* dof_values,
const ufc::cell& c) const;
/// Map coordinate xhat from reference cell to coordinate x in cell
virtual void map_from_reference_cell(double* x,
const double* xhat,
const ufc::cell& c) const;
/// Map from coordinate x in cell to coordinate xhat in reference cell
virtual void map_to_reference_cell(double* xhat,
const double* x,
const ufc::cell& c) const;
/// Return the number of sub elements (for a mixed element)
virtual unsigned int num_sub_elements() const;
/// Create a new finite element for sub element i (for a mixed element)
virtual ufc::finite_element* create_sub_element(unsigned int i) const;
/// Create a new class instance
virtual ufc::finite_element* create() const;
};
"""
finite_element_implementation= """\
/// Constructor
%(classname)s::%(classname)s(%(constructor_arguments)s) : ufc::finite_element()%(initializer_list)s
{
%(constructor)s
}
/// Destructor
%(classname)s::~%(classname)s()
{
%(destructor)s
}
/// Return a string identifying the finite element
const char* %(classname)s::signature() const
{
%(signature)s
}
/// Return the cell shape
ufc::shape %(classname)s::cell_shape() const
{
%(cell_shape)s
}
/// Return the topological dimension of the cell shape
unsigned int %(classname)s::topological_dimension() const
{
%(topological_dimension)s
}
/// Return the geometric dimension of the cell shape
unsigned int %(classname)s::geometric_dimension() const
{
%(geometric_dimension)s
}
/// Return the dimension of the finite element function space
unsigned int %(classname)s::space_dimension() const
{
%(space_dimension)s
}
/// Return the rank of the value space
unsigned int %(classname)s::value_rank() const
{
%(value_rank)s
}
/// Return the dimension of the value space for axis i
unsigned int %(classname)s::value_dimension(unsigned int i) const
{
%(value_dimension)s
}
/// Evaluate basis function i at given point in cell
void %(classname)s::evaluate_basis(unsigned int i,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis)s
}
/// Evaluate all basis functions at given point in cell
void %(classname)s::evaluate_basis_all(double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_all)s
}
/// Evaluate order n derivatives of basis function i at given point in cell
void %(classname)s::evaluate_basis_derivatives(unsigned int i,
unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_derivatives)s
}
/// Evaluate order n derivatives of all basis functions at given point in cell
void %(classname)s::evaluate_basis_derivatives_all(unsigned int n,
double* values,
const double* coordinates,
const ufc::cell& c) const
{
%(evaluate_basis_derivatives_all)s
}
/// Evaluate linear functional for dof i on the function f
double %(classname)s::evaluate_dof(unsigned int i,
const ufc::function& f,
const ufc::cell& c) const
{
%(evaluate_dof)s
}
/// Evaluate linear functionals for all dofs on the function f
void %(classname)s::evaluate_dofs(double* values,
const ufc::function& f,
const ufc::cell& c) const
{
%(evaluate_dofs)s
}
/// Interpolate vertex values from dof values
void %(classname)s::interpolate_vertex_values(double* vertex_values,
const double* dof_values,
const ufc::cell& c) const
{
%(interpolate_vertex_values)s
}
/// Map coordinate xhat from reference cell to coordinate x in cell
void %(classname)s::map_from_reference_cell(double* x,
const double* xhat,
const ufc::cell& c) const
{
%(map_from_reference_cell)s
}
/// Map from coordinate x in cell to coordinate xhat in reference cell
void %(classname)s::map_to_reference_cell(double* xhat,
const double* x,
const ufc::cell& c) const
{
%(map_to_reference_cell)s
}
/// Return the number of sub elements (for a mixed element)
unsigned int %(classname)s::num_sub_elements() const
{
%(num_sub_elements)s
}
/// Create a new finite element for sub element i (for a mixed element)
ufc::finite_element* %(classname)s::create_sub_element(unsigned int i) const
{
%(create_sub_element)s
}
/// Create a new class instance
ufc::finite_element* %(classname)s::create() const
{
%(create)s
}
"""
| 30.909535
| 99
| 0.590413
| 1,464
| 12,642
| 4.98429
| 0.072404
| 0.058791
| 0.044402
| 0.048102
| 0.950254
| 0.93326
| 0.925312
| 0.925312
| 0.918186
| 0.91243
| 0
| 0.001296
| 0.328429
| 12,642
| 408
| 100
| 30.985294
| 0.858186
| 0.014634
| 0
| 0.60479
| 0
| 0.002994
| 0.991246
| 0.172261
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44fcd668586db70a6f1c3a37e009d6cdacb11a43
| 4,029
|
py
|
Python
|
tests/snapshots/snap_test_fixed_elements.py
|
danpoland/slacktools
|
0a66f7dd7bbc32ac70f3c4855e5af51bf7d3d1c5
|
[
"MIT"
] | 7
|
2020-02-28T17:31:41.000Z
|
2021-08-23T17:57:47.000Z
|
tests/snapshots/snap_test_fixed_elements.py
|
danpoland/slacktools
|
0a66f7dd7bbc32ac70f3c4855e5af51bf7d3d1c5
|
[
"MIT"
] | 2
|
2020-03-31T11:13:49.000Z
|
2021-02-02T22:42:54.000Z
|
tests/snapshots/snap_test_fixed_elements.py
|
danpoland/slacktools
|
0a66f7dd7bbc32ac70f3c4855e5af51bf7d3d1c5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots['TestFixedButton.test_fixed 1'] = {
'action_id': 'test_action',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'title': {
'text': 'title',
'type': 'plain_text'
}
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'value'
}
snapshots['TestFixedButton.test_override[action_id-override] 1'] = {
'action_id': 'override',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'title': {
'text': 'title',
'type': 'plain_text'
}
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'value'
}
snapshots['TestFixedButton.test_override[value-override] 1'] = {
'action_id': 'test_action',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'title': {
'text': 'title',
'type': 'plain_text'
}
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'override'
}
snapshots['TestFixedButton.test_override[confirm-value2] 1'] = {
'action_id': 'test_action',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'override',
'type': 'plain_text'
},
'title': {
'text': 'override',
'type': 'plain_text'
}
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'value'
}
snapshots['TestFixedButton.test_override[text-override] 1'] = {
'action_id': 'test_action',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'title': {
'text': 'title',
'type': 'plain_text'
}
},
'text': {
'text': 'override',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'value'
}
snapshots['TestFixedButton.test_override[style-Styles.DANGER] 1'] = {
'action_id': 'test_action',
'confirm': {
'confirm': {
'text': 'confirm',
'type': 'plain_text'
},
'deny': {
'text': 'deny',
'type': 'plain_text'
},
'text': {
'text': 'text',
'type': 'plain_text'
},
'title': {
'text': 'title',
'type': 'plain_text'
}
},
'style': 'danger',
'text': {
'text': 'text',
'type': 'plain_text'
},
'type': 'button',
'url': 'http://crispy.dev',
'value': 'value'
}
| 22.016393
| 69
| 0.409034
| 321
| 4,029
| 4.968847
| 0.121495
| 0.165517
| 0.244514
| 0.117241
| 0.825705
| 0.815047
| 0.815047
| 0.815047
| 0.815047
| 0.796865
| 0
| 0.004098
| 0.394391
| 4,029
| 182
| 70
| 22.137363
| 0.64959
| 0.015388
| 0
| 0.686047
| 0
| 0
| 0.384208
| 0.065338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011628
| 0
| 0.011628
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44fdecae87fda71b21a46954e9439d2ff2ce705c
| 230
|
py
|
Python
|
deprecated_dataloaders/robotics/__init__.py
|
TianhaoFu/MultiBench
|
b174a3187124d6f92be1ff3b487eef292f7883bb
|
[
"MIT"
] | 148
|
2021-03-06T06:54:13.000Z
|
2022-03-29T19:27:21.000Z
|
deprecated_dataloaders/robotics/__init__.py
|
TianhaoFu/MultiBench
|
b174a3187124d6f92be1ff3b487eef292f7883bb
|
[
"MIT"
] | 10
|
2021-07-19T22:57:49.000Z
|
2022-02-04T03:12:29.000Z
|
deprecated_dataloaders/robotics/__init__.py
|
TianhaoFu/MultiBench
|
b174a3187124d6f92be1ff3b487eef292f7883bb
|
[
"MIT"
] | 18
|
2021-07-22T07:17:27.000Z
|
2022-03-27T16:11:40.000Z
|
from .MultimodalManipulationDataset import MultimodalManipulationDataset
from .MultimodalManipulationDataset_robust import MultimodalManipulationDataset_robust
from .ProcessForce import ProcessForce
from .ToTensor import ToTensor
| 46
| 86
| 0.913043
| 18
| 230
| 11.555556
| 0.333333
| 0.317308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069565
| 230
| 4
| 87
| 57.5
| 0.971963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
78a9ca786388ec6492ae629ed68c8620143b68be
| 474
|
py
|
Python
|
testsuite/tests/Q914-045__UT_ex/run_test.py
|
AdaCore/style_checker
|
17108ebfc44375498063ecdad6c6e4430458e60a
|
[
"CNRI-Python"
] | 2
|
2017-10-22T18:04:26.000Z
|
2020-03-06T11:07:41.000Z
|
testsuite/tests/Q914-045__UT_ex/run_test.py
|
AdaCore/style_checker
|
17108ebfc44375498063ecdad6c6e4430458e60a
|
[
"CNRI-Python"
] | null | null | null |
testsuite/tests/Q914-045__UT_ex/run_test.py
|
AdaCore/style_checker
|
17108ebfc44375498063ecdad6c6e4430458e60a
|
[
"CNRI-Python"
] | 4
|
2018-05-22T12:08:54.000Z
|
2020-12-14T15:25:27.000Z
|
def test_quote_arg_empty_arg(style_checker):
style_checker.enable_unit_test()
from asclib.ex import quote_arg
assert quote_arg('') == "''"
def test_quote_arg_single_quote(style_checker):
style_checker.enable_unit_test()
from asclib.ex import quote_arg
assert quote_arg("'") == r"''\'''"
def test_quote_arg_newline(style_checker):
style_checker.enable_unit_test()
from asclib.ex import quote_arg
assert quote_arg('\n') == r"''\n''"
| 23.7
| 47
| 0.71308
| 69
| 474
| 4.478261
| 0.26087
| 0.23301
| 0.116505
| 0.145631
| 0.757282
| 0.757282
| 0.757282
| 0.757282
| 0.757282
| 0.757282
| 0
| 0
| 0.160338
| 474
| 19
| 48
| 24.947368
| 0.776382
| 0
| 0
| 0.5
| 0
| 0
| 0.035865
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78c1c84203b63cdc56c2fd0c77a92accc07739ac
| 769
|
py
|
Python
|
Jupyter/cntccp4emaps.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
Jupyter/cntccp4emaps.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
Jupyter/cntccp4emaps.py
|
MooersLab/jupyterlabpymolpysnipsplus
|
b886750d63372434df53d4d6d7cdad6cb02ae4e7
|
[
"MIT"
] | null | null | null |
# Description: Count number of *.ccp4 (electron density map) files in current directory.
# Source: placeHolder
"""
cmd.do('print("Count the number of ccp4 electron density files in current directory.");')
cmd.do('print("Usage: cntccp4s");')
cmd.do('myPath = os.getcwd();')
cmd.do('ccp4Counter = len(glob.glob1(myPath,"*.pse"));')
cmd.do('print("Number of number of ccp4 electron density files in the current directory: ", ccp4Counter);')
"""
cmd.do('print("Count the number of ccp4 electron density files in current directory.");')
cmd.do('print("Usage: cntccp4s");')
cmd.do('myPath = os.getcwd();')
cmd.do('ccp4Counter = len(glob.glob1(myPath,"*.pse"));')
cmd.do('print("Number of number of ccp4 electron density files in the current directory: ", ccp4Counter);')
| 45.235294
| 107
| 0.707412
| 110
| 769
| 4.945455
| 0.254545
| 0.091912
| 0.110294
| 0.183824
| 0.891544
| 0.841912
| 0.841912
| 0.841912
| 0.841912
| 0.841912
| 0
| 0.01909
| 0.114434
| 769
| 16
| 108
| 48.0625
| 0.779736
| 0.561769
| 0
| 0
| 0
| 0
| 0.817073
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
153f8bf6af9aedf1816bf59f9c152b6b9c4eb60e
| 2,199
|
py
|
Python
|
LifeMeter.py
|
VizStars7/LifeMeter
|
f67abf0a8f897788b4e1072685a0a74cd54d797d
|
[
"Apache-2.0"
] | null | null | null |
LifeMeter.py
|
VizStars7/LifeMeter
|
f67abf0a8f897788b4e1072685a0a74cd54d797d
|
[
"Apache-2.0"
] | null | null | null |
LifeMeter.py
|
VizStars7/LifeMeter
|
f67abf0a8f897788b4e1072685a0a74cd54d797d
|
[
"Apache-2.0"
] | null | null | null |
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00@\x00\x00\x00s\xd0\x00\x00\x00d\x00d\x01l\x00m\x01Z\x01\x01\x00d\x02Z\x02e\x01d\x03\x83\x01\x01\x00e\x03e\x02\x83\x01\x01\x00e\x04d\x04\x83\x01Z\x05e\x01d\x03\x83\x01\x01\x00e\x03d\x05e\x05\x83\x02\x01\x00e\x01d\x03\x83\x01\x01\x00e\x03d\x06\x83\x01\x01\x00e\x06e\x04d\x07\x83\x01\x83\x01Z\x07e\x08e\x07d\x08\x14\x00\x83\x01Z\te\x08e\x07d\t\x14\x00\x83\x01Z\ne\x08e\x07d\n\x14\x00\x83\x01Z\x0be\x08e\x07d\x0b\x14\x00\x83\x01Z\x0ce\x01d\x03\x83\x01\x01\x00e\x03e\x05d\x0cd\re\td\x0ed\x0fe\nd\x10d\x0fe\x0bd\x11d\x0fe\x0cd\x12\x83\x0e\x01\x00d\x13Z\re\x01d\x03\x83\x01\x01\x00e\x03e\r\x83\x01\x01\x00e\x03e\x05\x83\x01\x01\x00d\x14S\x00)\x15\xe9\x00\x00\x00\x00)\x01\xda\x05sleepa\xc0\x01\x00\x00\n\x1b[33;1m\n _ _ __ __ _ _\n | | (_)/ _|___ | \\/ |___| |_ ___ _ _\n | |__| | _/ -_) | |\\/| / -_) _/ -_) \'_|\n |____|_|_| \\___| |_| |_\\___|\\__\\___|_|\n\n \x1b[35;1m\n=====================================================\n| |\n| C0DED BY : Heartz Brotherhood | Mr.Vizstars |\n| |\n=====================================================\n\n\xe9\x01\x00\x00\x00z\x1c\x1b[34;1m Masukan Nama Anda: z%Cek Seberapa Lama Kamu Hidup Di Duniaz&\x1b[33;1m Sekarang Masukan Umur Kamu!!! 
z\x02->id\x01\x00\x00i\xec\r\x00\x00i\x10\x87\x00\x00i\xa5J\x05\x00z\x11kamu hidup selama\xfa\x01:Z\x04hari\xfa\x01,Z\x03JamZ\x05menitZ\x05detikz\xd4\n\n\x1b[35;1m\n ___ _ __ __ _ _\n / __|___ _ __ __ _| |_ | \\/ |__ _| |_(_)\n | (__/ -_) \'_ \\/ _` | _| | |\\/| / _` | _| |\n \\___\\___| .__/\\__,_|\\__| |_| |_\\__,_|\\__|_|\n |_|\n\nN)\x0e\xda\x04timer\x02\x00\x00\x00\xda\x06banner\xda\x05print\xda\x05input\xda\x01i\xda\x03intZ\x03age\xda\x03strZ\x03DayZ\x04hour\xda\x03minZ\x03sec\xda\x01b\xa9\x00r\x0f\x00\x00\x00r\x0f\x00\x00\x00\xda\x00\xda\x08<module>\x01\x00\x00\x00s&\x00\x00\x00\x0c\x10\x04\x01\x08\x01\x08\x02\x08\x01\x08\x01\n\x01\x08\x01\x08\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x08\x01"\x0b\x04\x01\x08\x01\x08\x01'))
| 1,099.5
| 2,184
| 0.606185
| 363
| 2,199
| 3.37741
| 0.333333
| 0.141925
| 0.132137
| 0.117455
| 0.243883
| 0.181077
| 0.181077
| 0.075856
| 0.056281
| 0.029364
| 0
| 0.270833
| 0.126876
| 2,199
| 2
| 2,184
| 1,099.5
| 0.367708
| 0
| 0
| 0
| 0
| 1
| 0.638182
| 0.538636
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 11
|
155e4fc12333d410be0f5ab3eeb8e4ba2bf3e88e
| 10,117
|
py
|
Python
|
lib/systems/c150h30.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/c150h30.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/c150h30.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
""" Returns c150h30 as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C 1.21035 0.69880 0.00000
C 2.45871 1.41954 0.00000
C 1.21035 -0.69880 0.00000
C 0.00000 1.39760 0.00000
C 3.74723 0.68322 0.00000
C 2.45871 -1.41954 0.00000
C 2.46530 2.90358 0.00000
C -0.00000 -1.39760 -0.00000
C 0.00000 2.83907 0.00000
C -1.21035 0.69880 -0.00000
C 1.28192 3.58681 0.00000
C 3.74723 -0.68322 0.00000
C -1.21035 -0.69880 -0.00000
C 5.00132 1.43556 0.00000
C 2.46530 -2.90358 0.00000
C 3.74390 3.61349 0.00000
C -0.00000 -2.83907 -0.00000
C -1.28192 3.58681 -0.00000
C -2.45871 1.41954 -0.00000
C 5.00132 -1.43556 0.00000
C 4.99526 2.88402 0.00000
C 1.28192 -3.58681 0.00000
C 1.25743 5.04905 0.00000
C -2.45871 -1.41954 -0.00000
C -2.46530 2.90358 -0.00000
C 6.25025 0.70154 0.00000
C 3.74390 -3.61349 0.00000
C 3.73267 5.06210 0.00000
C -1.28192 -3.58681 -0.00000
C -1.25743 5.04905 0.00000
C -3.74723 0.68322 -0.00000
C 6.25025 -0.70154 0.00000
C 4.99526 -2.88402 0.00000
C 2.51758 5.76364 0.00000
C -2.46530 -2.90358 -0.00000
C 0.00000 5.76803 0.00000
C -3.74723 -0.68322 -0.00000
C 6.27163 3.62093 0.00000
C 1.25743 -5.04905 -0.00000
C -3.74390 3.61349 -0.00000
C 7.50295 1.43429 0.00000
C 3.73267 -5.06210 0.00000
C 4.99360 5.78060 0.00000
C -1.25743 -5.04905 -0.00000
C -2.51758 5.76364 -0.00000
C -5.00132 1.43556 -0.00000
C 7.50295 -1.43429 0.00000
C 6.27163 -3.62093 0.00000
C 7.50444 2.89417 0.00000
C 2.51758 -5.76364 0.00000
C 6.25865 5.05195 0.00000
C -0.00000 -5.76803 -0.00000
C 2.50935 7.21489 0.00000
C -3.74390 -3.61349 -0.00000
C 0.00000 7.24185 0.00000
C -5.00132 -1.43556 -0.00000
C -3.73267 5.06210 -0.00000
C -4.99526 2.88402 -0.00000
C 7.50444 -2.89417 0.00000
C 1.24579 7.94612 0.00000
C -4.99526 -2.88402 -0.00000
C 8.75839 0.70637 0.00000
C 4.99360 -5.78060 0.00000
C 4.99093 7.23181 0.00000
C -2.51758 -5.76364 -0.00000
C -2.50935 7.21489 -0.00000
C -6.25025 0.70154 -0.00000
C 8.75839 -0.70637 0.00000
C 6.25865 -5.05195 0.00000
C 3.76746 7.93817 0.00000
C -3.73267 -5.06210 -0.00000
C -1.24579 7.94612 0.00000
C -6.25025 -0.70154 -0.00000
C 8.77337 3.58003 0.00000
C 2.50935 -7.21489 0.00000
C 7.48708 5.80795 0.00000
C -0.00000 -7.24185 -0.00000
C -4.99360 5.78060 -0.00000
C -6.27163 3.62093 -0.00000
C 8.77337 -3.58003 0.00000
C 8.80955 5.08619 0.00000
C 1.24579 -7.94612 -0.00000
C 1.28629 9.38797 0.00000
C -6.27163 -3.62093 -0.00000
C -6.25865 5.05195 -0.00000
C 9.97076 1.41600 0.00000
C 4.99093 -7.23181 0.00000
C 6.21167 7.92694 0.00000
C -2.50935 -7.21489 -0.00000
C -3.76746 7.93817 -0.00000
C -7.50295 1.43429 -0.00000
C 9.97076 -1.41600 0.00000
C 7.48708 -5.80795 0.00000
C 9.96017 2.86131 0.00000
C 3.76746 -7.93817 0.00000
C 7.45805 7.19511 0.00000
C -1.24579 -7.94612 -0.00000
C 3.75909 9.34293 0.00000
C -4.99360 -5.78060 -0.00000
C -1.28629 9.38797 0.00000
C -7.50295 -1.43429 -0.00000
C -4.99093 7.23181 -0.00000
C -7.50444 2.89417 -0.00000
C 9.96017 -2.86131 0.00000
C 8.80955 -5.08619 0.00000
C 2.50212 10.05642 0.00000
C -6.25865 -5.05195 -0.00000
C 0.00000 10.17239 0.00000
C -7.50444 -2.89417 -0.00000
C 11.18178 0.70229 0.00000
C 6.21167 -7.92694 0.00000
C 6.19909 9.33256 0.00000
C -3.76746 -7.93817 -0.00000
C -3.75909 9.34293 -0.00000
C -8.75839 0.70637 -0.00000
C -7.48708 5.80795 -0.00000
C 11.18178 -0.70229 0.00000
C 7.45805 -7.19511 0.00000
C 1.28629 -9.38797 -0.00000
C 4.98269 10.03485 0.00000
C -4.99093 -7.23181 -0.00000
C -2.50212 10.05642 0.00000
C -8.75839 -0.70637 -0.00000
C 3.75909 -9.34293 0.00000
C -1.28629 -9.38797 -0.00000
C -6.21167 7.92694 -0.00000
C -8.77337 3.58003 -0.00000
C 2.50212 -10.05642 -0.00000
C -0.00000 -10.17239 -0.00000
C -7.48708 -5.80795 -0.00000
C -8.77337 -3.58003 -0.00000
C -7.45805 7.19511 -0.00000
C -8.80955 5.08619 -0.00000
C 6.19909 -9.33256 0.00000
C -3.75909 -9.34293 -0.00000
C -4.98269 10.03485 -0.00000
C -9.97076 1.41600 -0.00000
C 4.98269 -10.03485 0.00000
C -2.50212 -10.05642 -0.00000
C -6.21167 -7.92694 -0.00000
C -9.97076 -1.41600 -0.00000
C -6.19909 9.33256 -0.00000
C -9.96017 2.86131 -0.00000
C -8.80955 -5.08619 -0.00000
C -7.45805 -7.19511 -0.00000
C -9.96017 -2.86131 -0.00000
C -4.98269 -10.03485 -0.00000
C -11.18178 0.70229 -0.00000
C -6.19909 -9.33256 -0.00000
C -11.18178 -0.70229 -0.00000
H 9.77190 5.64181 0.00000
H 10.90424 3.39663 0.00000
H 8.39368 7.74503 0.00000
H 10.90424 -3.39663 0.00000
H 9.77190 -5.64181 0.00000
H 2.51055 11.14166 0.00000
H 0.00000 11.28362 0.00000
H 12.12680 1.23168 0.00000
H 7.13006 9.88628 0.00000
H 12.12680 -1.23168 0.00000
H 8.39368 -7.74503 0.00000
H 4.99674 11.11796 0.00000
H -2.51055 11.14166 0.00000
H 2.51055 -11.14166 -0.00000
H -0.00000 -11.28362 -0.00000
H -8.39368 7.74503 -0.00000
H -9.77190 5.64181 -0.00000
H 7.13006 -9.88628 0.00000
H -4.99674 11.11796 -0.00000
H 4.99674 -11.11796 0.00000
H -2.51055 -11.14166 -0.00000
H -7.13006 9.88628 -0.00000
H -10.90424 3.39663 -0.00000
H -9.77190 -5.64181 -0.00000
H -8.39368 -7.74503 -0.00000
H -10.90424 -3.39663 -0.00000
H -4.99674 -11.11796 -0.00000
H -12.12680 1.23168 -0.00000
H -7.13006 -9.88628 -0.00000
H -12.12680 -1.23168 -0.00000
""")
| 53.81383
| 63
| 0.355046
| 1,289
| 10,117
| 2.784329
| 0.101629
| 0.320981
| 0.29061
| 0.044581
| 0.961828
| 0.961828
| 0.961828
| 0.961828
| 0.958206
| 0.954583
| 0
| 0.74023
| 0.56242
| 10,117
| 187
| 64
| 54.101604
| 0.070477
| 0.01028
| 0
| 0
| 0
| 0
| 0.991594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005435
| true
| 0
| 0.005435
| 0
| 0.016304
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
156e84da1b7e41da9f830a3da9ce852344a74aca
| 6,457
|
py
|
Python
|
php4dvd/test_film_adding.py
|
semenkravchenko/selenium-py-training-kravchenko
|
beefad65de6a42f184fd3eeb3b998c7806edb4b2
|
[
"Apache-2.0"
] | null | null | null |
php4dvd/test_film_adding.py
|
semenkravchenko/selenium-py-training-kravchenko
|
beefad65de6a42f184fd3eeb3b998c7806edb4b2
|
[
"Apache-2.0"
] | null | null | null |
php4dvd/test_film_adding.py
|
semenkravchenko/selenium-py-training-kravchenko
|
beefad65de6a42f184fd3eeb3b998c7806edb4b2
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from model.user import User
import unittest
def test_film_adding_imdb_search(app):
search_target_string = u"Криминальное чтиво"
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
films_before_adding = app.get_films_on_page()
app.open_add_film_form()
app.search_and_add_film_from_imdb(search_target_string)
films_after_adding = app.get_films_on_page()
app.is_film_list_changed(films_before_adding, films_after_adding)
app.logout()
assert app.is_not_logged_in()
def test_film_adding_imbdid_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="imdbid", keys="666", required_class="digits")
app.is_field_works(element="imdbid", keys="-1", required_class="digits error")
app.is_field_works(element="imdbid", keys="not a number", required_class="digits error")
app.is_field_works(element="imdbid", keys="", required_class="digits")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_name_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="name", keys="13", required_class="required")
app.is_field_works(element="name", keys="-1", required_class="required")
app.is_field_works(element="name", keys="some name", required_class="required")
app.is_field_works(element="name", keys="", required_class="required error")
app.is_field_works(element="name", keys="!@#$%^&*()", required_class="required")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_year_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="year", keys="1937", required_class="required digits")
app.is_field_works(element="year", keys="-1", required_class="required digits error")
app.is_field_works(element="year", keys="not a number", required_class="required digits error")
app.is_field_works(element="year", keys="", required_class="required digits error")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_duration_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="duration", keys="13", required_class="digits")
app.is_field_works(element="duration", keys="-1", required_class="digits error")
app.is_field_works(element="duration", keys="not a number", required_class="digits error")
app.is_field_works(element="duration", keys="", required_class="digits")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_rating_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="rating", keys="13", required_class="number")
app.is_field_works(element="rating", keys="-1", required_class="number")
app.is_field_works(element="rating", keys="not a number", required_class="number error")
app.is_field_works(element="rating", keys="", required_class="number")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_trailer_field(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
app.is_field_works(element="trailer", keys="13", required_class="url error")
app.is_field_works(element="trailer", keys="-1", required_class="url error")
app.is_field_works(element="trailer", keys="some text", required_class="url error")
app.is_field_works(element="trailer", keys="", required_class="url")
app.is_field_works(element="trailer", keys="http://www.ru", required_class="url")
app.logout()
assert app.is_not_logged_in()
def test_film_adding_full_data(app):
app.ensure_logout()
app.login(User.Admin())
assert app.is_logged_in()
app.open_add_film_form()
films_before_adding = app.get_films_on_page()
app.is_field_works(element="imdbid", keys="13", required_class="digits")
app.is_field_works(element="name", keys="Seven", required_class="required")
app.is_field_works(element="year", keys="1999", required_class="required digits")
app.is_field_works(element="duration", keys="777", required_class="digits")
app.is_field_works(element="rating", keys="5", required_class="number")
app.is_field_works(element="format", keys="VHS", required_class="required ui-autocomplete-input")
app.is_field_works(element="aka", keys="aka", required_class=None, is_required=False)
app.is_field_works(element="notes", keys="notes", required_class=None, is_required=False)
app.is_field_works(element="taglines", keys="taglines", required_class=None, is_required=False)
app.is_field_works(element="plotoutline", keys="plotoutline", required_class=None, is_required=False)
app.is_field_works(element="plots", keys="plots", required_class=None, is_required=False)
app.is_field_works(element="languages", keys="Russian", required_class=None, is_required=False)
app.is_field_works(element="subtitles", keys="subtitles", required_class=None, is_required=False)
app.is_field_works(element="audio", keys="audio", required_class=None, is_required=False)
app.is_field_works(element="video", keys="video", required_class=None, is_required=False)
app.is_field_works(element="country", keys="country", required_class=None, is_required=False)
app.is_field_works(element="genres", keys="genres", required_class=None, is_required=False)
app.is_field_works(element="director", keys="director", required_class=None, is_required=False)
app.is_field_works(element="writer", keys="writer", required_class=None, is_required=False)
app.is_field_works(element="producer", keys="producer", required_class=None, is_required=False)
app.is_field_works(element="music", keys="music", required_class=None, is_required=False)
app.is_field_works(element="cast", keys="cast", required_class=None, is_required=False)
app.home_return_call()
films_after_adding = app.get_films_on_page()
app.is_film_list_changed(films_before_adding, films_after_adding)
app.logout()
assert app.is_not_logged_in()
if __name__ == "__main__":
unittest.main()
| 40.10559
| 105
| 0.735326
| 946
| 6,457
| 4.683932
| 0.108879
| 0.074475
| 0.108328
| 0.162492
| 0.84947
| 0.844053
| 0.823065
| 0.773866
| 0.731438
| 0.69262
| 0
| 0.005629
| 0.11956
| 6,457
| 160
| 106
| 40.35625
| 0.773791
| 0.003252
| 0
| 0.461538
| 0
| 0
| 0.136929
| 0.003264
| 0
| 0
| 0
| 0
| 0.136752
| 1
| 0.068376
| false
| 0
| 0.017094
| 0
| 0.08547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
15bf2d911c8a8822759e78af0689f7946111a57c
| 5,987
|
py
|
Python
|
lms_app/migrations/0002_auto_20201211_1025.py
|
neethu-niya/sbr
|
cdae2000e718ccc6fca948d241f29acb2d2b388d
|
[
"MIT"
] | null | null | null |
lms_app/migrations/0002_auto_20201211_1025.py
|
neethu-niya/sbr
|
cdae2000e718ccc6fca948d241f29acb2d2b388d
|
[
"MIT"
] | null | null | null |
lms_app/migrations/0002_auto_20201211_1025.py
|
neethu-niya/sbr
|
cdae2000e718ccc6fca948d241f29acb2d2b388d
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0 on 2020-12-11 10:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Second half of the lms_app initial schema: adds the relational fields
    # (ForeignKey / OneToOneField / ManyToManyField) that connect the models
    # created in 0001_initial to each other, to the configured auth user
    # model, and to the django-cities-light geography models.

    # NOTE(review): `initial = True` on a migration that only AddFields onto
    # 0001_initial models suggests a deliberately split initial migration
    # set — TODO confirm this flag is intended rather than mis-set.
    initial = True

    dependencies = [
        # django-cities-light migration providing the Country/Region models
        # referenced by the Student fields below.
        ('cities_light', '0010_auto_20200508_1851'),
        ('lms_app', '0001_initial'),
        # Resolves to whatever AUTH_USER_MODEL is configured to at migrate time.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # --- account links: one user account per teacher/student/profile ---
        migrations.AddField(
            model_name='teacher',
            name='user',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        # --- curriculum hierarchy: syllabus -> standard -> subject -> chapter ---
        migrations.AddField(
            model_name='subject',
            name='standard',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Standard'),
        ),
        migrations.AddField(
            model_name='study_material',
            name='chapter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Chapter'),
        ),
        # --- student geography and enrollment ---
        migrations.AddField(
            model_name='student',
            name='country',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cities_light.Country'),
        ),
        migrations.AddField(
            model_name='student',
            name='present_country',
            # related_name avoids a reverse-accessor clash with the `country`
            # FK on the same model pointing at the same target.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pre_country', to='cities_light.Country'),
        ),
        migrations.AddField(
            model_name='student',
            name='scheme',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lms_app.Scheme'),
        ),
        migrations.AddField(
            model_name='student',
            name='standard',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Standard'),
        ),
        migrations.AddField(
            model_name='student',
            name='state',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cities_light.Region'),
        ),
        migrations.AddField(
            model_name='student',
            name='syllabus',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lms_app.Syllabus'),
        ),
        migrations.AddField(
            model_name='student',
            name='user',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='standard',
            name='syllabus',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Syllabus'),
        ),
        # --- scheme: a bundle of subjects within a standard/syllabus ---
        migrations.AddField(
            model_name='scheme',
            name='standard',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Standard'),
        ),
        migrations.AddField(
            model_name='scheme',
            name='subject',
            field=models.ManyToManyField(to='lms_app.Subject'),
        ),
        migrations.AddField(
            model_name='scheme',
            name='syllabus',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Syllabus'),
        ),
        # --- per-chapter content and discussion ---
        migrations.AddField(
            model_name='question_paper',
            name='chapter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Chapter'),
        ),
        migrations.AddField(
            model_name='profile',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='file',
            name='chapter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Chapter'),
        ),
        migrations.AddField(
            model_name='file',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='documents',
            name='chapter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Chapter'),
        ),
        # Comment may come from a student or a teacher, hence both FKs nullable.
        migrations.AddField(
            model_name='comment',
            name='student',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='lms_app.Student'),
        ),
        migrations.AddField(
            model_name='comment',
            name='teacher',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='lms_app.Teacher'),
        ),
        migrations.AddField(
            model_name='comment',
            name='video',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='lms_app.Video'),
        ),
        migrations.AddField(
            model_name='chat',
            name='chapter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Chapter'),
        ),
        migrations.AddField(
            model_name='chat',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='chapter',
            name='standard',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Standard'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='subject',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lms_app.Subject'),
        ),
    ]
| 39.913333
| 147
| 0.604142
| 631
| 5,987
| 5.580032
| 0.115689
| 0.061346
| 0.10338
| 0.162454
| 0.849759
| 0.838398
| 0.753763
| 0.753763
| 0.753763
| 0.753763
| 0
| 0.007766
| 0.268749
| 5,987
| 149
| 148
| 40.181208
| 0.796482
| 0.007182
| 0
| 0.739437
| 1
| 0
| 0.127735
| 0.003871
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021127
| 0
| 0.049296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ecb34e9248539dd548316446f0fe7fe040feef4b
| 39,357
|
py
|
Python
|
src/World_Bank_Correlations/World_Bank_Correlations.py
|
JohnMarion54/World_Bank_Correlations
|
d677f3e8708fe5e04166adaf5a4a14cbe61de181
|
[
"MIT"
] | null | null | null |
src/World_Bank_Correlations/World_Bank_Correlations.py
|
JohnMarion54/World_Bank_Correlations
|
d677f3e8708fe5e04166adaf5a4a14cbe61de181
|
[
"MIT"
] | null | null | null |
src/World_Bank_Correlations/World_Bank_Correlations.py
|
JohnMarion54/World_Bank_Correlations
|
d677f3e8708fe5e04166adaf5a4a14cbe61de181
|
[
"MIT"
] | null | null | null |
import requests
import pandas as pd
import world_bank_data as wb
import lxml
def wb_corr(data, col, indicator, change=False):
    """
    Return the relationship between an input variable and chosen World Bank
    indicator(s), sorted by strength of relationship.

    The relationship is the correlation between the input variable and each
    chosen indicator; with ``change=True`` the correlation between their
    annual percent changes is also computed and used for ordering.

    Parameters
    ----------
    data: pandas DataFrame with a column of countries called "Country," a
        column of years called "Year," and a column of data for a variable
    col: integer index of the column of `data` holding the variable's values
    indicator: a World Bank indicator ID (string) or a list of IDs; IDs can
        be found through the World Bank APIs
    change: when True, also correlate the annual percent changes of the two
        variables and order the output on that correlation

    Returns
    ----------
    Pandas DataFrame
        Indexed by indicator name, with columns ['Correlation', 'n'] and,
        when change is True, also ['Correlation_change', 'n_change'].  One
        row per requested indicator, ordered by |Correlation| when change is
        False and by |Correlation_change| when change is True.

    Examples
    ----------
    >>> wb_corr(my_df, 2, '3.0.Gini')  # my_df has columns Country, Year, Data
    >>> wb_corr(wb.get_series('SP.POP.TOTL', mrv=50).reset_index(), 3,
    ...         ['3.0.Gini', '1.0.HCount.1.90usd'], True)
    """
    assert isinstance(indicator, (str, list)), "indicator must be either a string or a list of strings"
    assert isinstance(col, int), "col must be the integer index of the column containing data on the variable of interest"
    assert 'Country' in data.columns, "data must have a column containing countries called 'Country'"
    assert 'Year' in data.columns, "Data must have a column containing years called 'Year'"
    assert col < data.shape[1], "col must be a column index belonging to data"
    assert isinstance(change, bool), "change must be a Boolean value (True or False)"

    _BAD_ID_MSG = "indicator must be the id of an indicator in the World Bank Data. Indicators can be found using the World Bank APIs. http://api.worldbank.org/v2/indicator?per_page=21000 to see all indicators or http://api.worldbank.org/v2/topic/_/indicator? to see indicators under a chosen topic (replace _ with integer 1-21)"

    def _pct_change_by_country(frame, value_idx, lag_name, chg_name):
        # Per-country annual percent change of frame.iloc[:, value_idx].
        # shift(-1) pairs each row with the next one, which for mrv-ordered
        # World Bank series is the previous year -- TODO confirm row order
        # for caller-supplied data.
        out = pd.DataFrame()
        for country in frame['Country'].unique():
            grp = frame[frame['Country'] == country]
            grp.loc[:, lag_name] = grp.iloc[:, value_idx].shift(-1)
            grp.loc[:, chg_name] = ((grp.iloc[:, value_idx] - grp[lag_name]) / grp[lag_name]) * 100
            out = pd.concat([out, grp])
        return out

    # Save and silence pandas' chained-assignment warning for the slice
    # writes in _pct_change_by_country; restored in the finally block.
    # (BUG FIX: the restore previously referenced an undefined `orig_value`
    # and sat on an unreachable line after the returns, so the global pandas
    # option was permanently clobbered.)
    orig_value = pd.options.mode.chained_assignment
    pd.options.mode.chained_assignment = None
    try:
        # Fetch the indicator catalogue once, not once per list element.
        valid_ids = set(pd.read_xml(
            requests.get('http://api.worldbank.org/v2/indicator?per_page=21000').content)['id'])
        indicator_list = [indicator] if isinstance(indicator, str) else indicator
        for indic in indicator_list:
            assert isinstance(indic, str), "Elements of indicator must be strings"
            assert indic in valid_ids, _BAD_ID_MSG

        cors, indicators, n = [], [], []
        for indic in indicator_list:
            series_df = pd.DataFrame(wb.get_series(indic, mrv=50)).reset_index()
            merged = pd.merge(data, series_df, how='inner', on=['Country', 'Year'])
            # The fetched indicator's values land in the last merged column.
            cors.append(merged.iloc[:, col].corr(merged.iloc[:, merged.shape[1] - 1]))
            # mrv=1 fetch is a cheap way to recover the human-readable name.
            indicators.append(pd.DataFrame(wb.get_series(indic, mrv=1)).reset_index()['Series'][0])
            n.append(len(merged[merged.iloc[:, col].notnull()
                               & merged.iloc[:, merged.shape[1] - 1].notnull()]))
        if not change:
            return pd.DataFrame(list(zip(indicators, cors, n)),
                                columns=['Indicator', 'Correlation', 'n']
                                ).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')

        cors_change, n_change = [], []
        # The input variable's percent changes do not depend on the indicator,
        # so compute them once (previously recomputed inside the loop).
        data_pct = _pct_change_by_country(data, col, 'lag_dat', 'pct_chg_dat')
        for indic in indicator_list:
            series_df = pd.DataFrame(wb.get_series(indic, mrv=50)).reset_index()
            # Column 3 of the reset-index frame is the indicator's value column.
            ind_pct = _pct_change_by_country(series_df, 3, 'lag_ind', 'pct_chg_ind')
            merged_pct = pd.merge(data_pct, ind_pct, how='left', on=['Country', 'Year'])
            cors_change.append(merged_pct['pct_chg_dat'].corr(merged_pct['pct_chg_ind']))
            n_change.append(len(merged_pct[merged_pct['pct_chg_dat'].notnull()
                                           & merged_pct['pct_chg_ind'].notnull()]))
        # BUG FIX: the single-indicator path previously sorted change=True
        # results on 'Correlation', contradicting the documented contract;
        # both paths now order on 'Correlation_change'.
        return pd.DataFrame(list(zip(indicators, cors, n, cors_change, n_change)),
                            columns=['Indicator', 'Correlation', 'n', 'Correlation_change', 'n_change']
                            ).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
    finally:
        pd.options.mode.chained_assignment = orig_value
def wb_topic_corrs(data, col, topic, k=5, change=False, nlim=1, cor_lim=0, t_lim=0):
    """
    Return the k indicators of a chosen World Bank topic with the strongest
    relationship to an input variable, sorted by strength of relationship.

    The relationship is the correlation between the input variable and each
    indicator of the topic; with ``change=True`` the correlation between
    their annual percent changes is also computed and used for ordering.

    Parameters
    ----------
    data: pandas DataFrame with a column of countries called "Country," a
        column of years called "Year," and a column of data for a variable
    col: integer index of the column of `data` holding the variable's values
    topic: topic name (string) or its World Bank topic number (integer
        1-21); topics can be found through the World Bank APIs
    k: number of indicators to return (the k strongest relationships)
    change: when True, also correlate annual percent changes and order the
        output on that correlation
    nlim: minimum n for an indicator to be reported
    cor_lim: minimum |correlation| for an indicator to be reported
    t_lim: minimum |t| for an indicator to be reported; when 0 the t
        column(s) are omitted from the output

    Returns
    ----------
    Pandas DataFrame
        Indexed by indicator name, with correlation and n columns, plus t /
        percent-change columns depending on `change` and `t_lim`.  At most
        k rows.

    Raises
    ----------
    ValueError: if `topic` is not a recognized topic name or number.

    Examples
    ----------
    >>> wb_topic_corrs(my_df, 2, 1)  # my_df has columns Country, Year, Data
    >>> wb_topic_corrs(wb.get_series('3.0.Gini', mrv=50).reset_index(), 3,
    ...                'Energy & Mining', change=True, cor_lim=.2)
    """
    from math import sqrt
    assert isinstance(topic, (int, str)), "indicator must be either a string or an integer corresponding to the topic. A list of topics can be found through the World Bank API: http://api.worldbank.org/v2/topic?"
    assert isinstance(col, int), "col must be the integer index of the column containing data on the variable of interest"
    assert 'Country' in data.columns, "data must have a column containing countries called 'Country'"
    assert 'Year' in data.columns, "data must have a column containing years called 'Year'"
    assert col < data.shape[1], "col must be a column index belonging to data"
    assert isinstance(change, bool), "change must be a Boolean value (True or False)"
    assert isinstance(k, int), "k must be an integer"
    assert isinstance(nlim, int), "n must be an integer"
    assert isinstance(cor_lim, (float, int)), "cor_lim must be a real number"
    assert isinstance(t_lim, (float, int)), "n_lim must be a real number"

    # topic number -> (topic name, per_page large enough to fetch the whole
    # topic in one request).  Replaces the former 21-branch if chain.
    topics = {
        1: ('Agriculture & Rural Development', 50),
        2: ('Aid Effectiveness', 80),
        3: ('Economy & Growth', 310),
        4: ('Education', 1015),
        5: ('Energy & Mining', 55),
        6: ('Environment', 145),
        7: ('Financial Sector', 210),
        8: ('Health', 651),
        9: ('Infrastructure', 80),
        10: ('Social Protection & Labor', 2150),
        11: ('Poverty', 150),
        12: ('Private Sector', 200),
        13: ('Public Sector', 120),
        14: ('Science & Technology', 15),
        15: ('Social Development', 35),
        16: ('Urban Development', 35),
        17: ('Gender', 315),
        18: ('Millenium Development Goals', 30),  # [sic] spelling kept from original
        19: ('Climate Change', 85),
        20: ('External Debt', 520),
        21: ('Trade', 160),
    }
    if isinstance(topic, int):
        topic_num = topic if topic in topics else None
    else:
        topic_num = next((num for num, (name, _) in topics.items() if name == topic), None)
    if topic_num is None:
        # BUG FIX: an unrecognized topic previously fell through to a
        # NameError on `top_df`; fail with a clear message instead.
        raise ValueError("topic must be a World Bank topic name or an integer 1-21; see http://api.worldbank.org/v2/topic?")
    top_df = pd.read_xml(requests.get(
        'http://api.worldbank.org/v2/topic/%d/indicator?per_page=%d'
        % (topic_num, topics[topic_num][1])).content)

    def _pct_change_by_country(frame, value_idx, lag_name, chg_name):
        # Per-country annual percent change of frame.iloc[:, value_idx];
        # shift(-1) pairing assumes mrv (most-recent-first) row order --
        # TODO confirm for caller-supplied data.
        out = pd.DataFrame()
        for country in frame['Country'].unique():
            grp = frame[frame['Country'] == country]
            grp.loc[:, lag_name] = grp.iloc[:, value_idx].shift(-1)
            grp.loc[:, chg_name] = ((grp.iloc[:, value_idx] - grp[lag_name]) / grp[lag_name]) * 100
            out = pd.concat([out, grp])
        return out

    def _t_stat(r, n_obs):
        # t statistic of a correlation; None when |r| == 1 (zero denominator).
        if r == 1 or r == -1:
            return None
        return r * sqrt((n_obs - 2) / (1 - r * r))

    cors, indicators, n, t = [], [], [], []
    # Save and silence pandas' chained-assignment warning; restored in the
    # finally block.  (BUG FIX: the restore previously referenced an
    # undefined `orig_value` on an unreachable line.)
    orig_value = pd.options.mode.chained_assignment
    pd.options.mode.chained_assignment = None
    try:
        if not change:
            for i in range(len(top_df['id'])):
                try:
                    series_df = pd.DataFrame(wb.get_series(top_df.loc[i, 'id'], mrv=50))
                except Exception:
                    # BUG FIX: this was `pass`, which fell through and
                    # correlated the previous iteration's stale data (or hit
                    # a NameError on the first).  Indicators removed from the
                    # World Bank data are now genuinely skipped.
                    continue
                merged = pd.merge(data, series_df, how='inner', on=['Country', 'Year'])
                cor_i = merged.iloc[:, col].corr(merged.iloc[:, merged.shape[1] - 1])
                cors.append(cor_i)
                indicators.append(top_df['{http://www.worldbank.org}name'][i])
                n_i = len(merged[merged.iloc[:, col].notnull()
                                 & merged.iloc[:, merged.shape[1] - 1].notnull()])
                n.append(n_i)
                t.append(_t_stat(cor_i, n_i))
            if t_lim == 0:
                out = pd.DataFrame(list(zip(indicators, cors, n)),
                                   columns=['Indicator', 'Correlation', 'n']
                                   ).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
                return out.loc[(out.n > nlim)
                               & ((out.Correlation > cor_lim) | (out.Correlation < -cor_lim))].head(k)
            out = pd.DataFrame(list(zip(indicators, cors, n, t)),
                               columns=['Indicator', 'Correlation', 'n', 't']
                               ).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
            return out.loc[(out.n > nlim)
                           & ((out.Correlation > cor_lim) | (out.Correlation < -cor_lim))
                           & ((out.t > t_lim) | (out.t < -t_lim))].head(k)

        cors_change, n_change, t_change = [], [], []
        # Input variable's percent changes are indicator-independent: once.
        data_pct = _pct_change_by_country(data, col, 'lag_dat', 'pct_chg_dat')
        for i in range(len(top_df['id'])):
            try:
                series_df = pd.DataFrame(wb.get_series(top_df.loc[i, 'id'], mrv=50))
            except Exception:
                continue  # removed indicator: skip (see note in the branch above)
            merged = pd.merge(data, series_df, how='inner', on=['Country', 'Year'])
            cor_i = merged.iloc[:, col].corr(merged.iloc[:, merged.shape[1] - 1])
            cors.append(cor_i)
            n_i = len(merged[merged.iloc[:, col].notnull()
                             & merged.iloc[:, merged.shape[1] - 1].notnull()])
            n.append(n_i)
            # BUG FIX: this branch previously divided by zero when |r| == 1;
            # it now uses the same guard as the change=False branch.
            t.append(_t_stat(cor_i, n_i))
            indicators.append(top_df.loc[i, '{http://www.worldbank.org}name'])
            ind_pct = _pct_change_by_country(series_df.reset_index(), 3, 'lag_ind', 'pct_chg_ind')
            merged_pct = pd.merge(data_pct, ind_pct, how='left', on=['Country', 'Year'])
            cor_chg_i = merged_pct['pct_chg_dat'].corr(merged_pct['pct_chg_ind'])
            cors_change.append(cor_chg_i)
            n_chg_i = len(merged_pct[merged_pct['pct_chg_dat'].notnull()
                                     & merged_pct['pct_chg_ind'].notnull()])
            n_change.append(n_chg_i)
            t_change.append(_t_stat(cor_chg_i, n_chg_i))
        if t_lim == 0:
            out = pd.DataFrame(list(zip(indicators, cors, n, cors_change, n_change)),
                               columns=['Indicator', 'Correlation', 'n', 'Correlation_change', 'n_change']
                               ).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
            return out.loc[(out.n_change > nlim)
                           & ((out.Correlation_change > cor_lim) | (out.Correlation_change < -cor_lim))].head(k)
        out = pd.DataFrame(list(zip(indicators, cors, n, t, cors_change, n_change, t_change)),
                           columns=['Indicator', 'Correlation', 'n', 't', 'Correlation_change', 'n_change', 't_change']
                           ).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
        return out.loc[(out.n_change > nlim)
                       & ((out.Correlation_change > cor_lim) | (out.Correlation_change < -cor_lim))
                       & ((out.t_change > t_lim) | (out.t_change < -t_lim))].head(k)
    finally:
        pd.options.mode.chained_assignment = orig_value
def wb_corrs_search(data, col, search, k=5, change=False, nlim=1, cor_lim=0, t_lim=0):
    """
    Return the World Bank indicators matching a search with the strongest
    relationship to an input variable, sorted by strength of relationship.

    The relationship is the correlation between the input variable and each
    matching indicator; with ``change=True`` the correlation between their
    annual percent changes is also computed and used for ordering.

    Parameters
    ----------
    data: pandas DataFrame with a column of countries called "Country," a
        column of years called "Year," and a column of data for a variable
    col: integer index of the column of `data` holding the variable's values
    search: search string; indicators matching it are correlated with the
        input variable
    k: number of indicators to return (the k strongest relationships)
    change: when True, also correlate annual percent changes and order the
        output on that correlation
    nlim: minimum n for an indicator to be reported
    cor_lim: minimum |correlation| for an indicator to be reported
    t_lim: minimum |t| for an indicator to be reported; when 0 the t
        column(s) are omitted from the output

    Returns
    ----------
    Pandas DataFrame
        Indexed by indicator name, with correlation and n columns, plus t /
        percent-change columns depending on `change` and `t_lim`.  At most
        k rows.

    Examples
    ----------
    >>> wb_corrs_search(my_df, 2, "income share")
    >>> wb_corrs_search(wb.get_series('3.0.Gini', mrv=50).reset_index(), 3,
    ...                 "income share", change=True, t_lim=.5)
    """
    from math import sqrt
    assert isinstance(search, str), "search must be a character string."
    assert 'Country' in data.columns, "data must have a column containing countries called 'Country'"
    assert 'Year' in data.columns, "data must have a column containing years called 'Year'"
    assert isinstance(col, int), "col must be an integer of a column index that exists in data"
    assert col < data.shape[1], "col must be a column index belonging to data"
    assert isinstance(change, bool), "change must be a Boolean value (True or False)"
    assert isinstance(k, int), "k must be an integer"
    assert isinstance(nlim, int), "n must be an integer"
    assert isinstance(cor_lim, (float, int)), "cor_lim must be a real number"
    assert isinstance(t_lim, (float, int)), "n_lim must be a real number"

    def _pct_change_by_country(frame, value_idx, lag_name, chg_name):
        # Per-country annual percent change of frame.iloc[:, value_idx];
        # shift(-1) pairing assumes mrv (most-recent-first) row order --
        # TODO confirm for caller-supplied data.
        out = pd.DataFrame()
        for country in frame['Country'].unique():
            grp = frame[frame['Country'] == country]
            grp.loc[:, lag_name] = grp.iloc[:, value_idx].shift(-1)
            grp.loc[:, chg_name] = ((grp.iloc[:, value_idx] - grp[lag_name]) / grp[lag_name]) * 100
            out = pd.concat([out, grp])
        return out

    def _t_stat(r, n_obs):
        # t statistic of a correlation; None when |r| == 1 (zero denominator).
        if r == 1 or r == -1:
            return None
        return r * sqrt((n_obs - 2) / (1 - r * r))

    inds = wb.search_indicators(search).reset_index()
    cors, indicators, n, t = [], [], [], []
    skipped = set()  # ids whose data could not be fetched; skipped in BOTH passes
    # Save and silence pandas' chained-assignment warning; restored in the
    # finally block.  (BUG FIX: the restore previously referenced an
    # undefined `orig_value` on an unreachable line.)
    orig_value = pd.options.mode.chained_assignment
    pd.options.mode.chained_assignment = None
    try:
        for indic in inds['id']:
            try:
                series_df = pd.DataFrame(wb.get_series(indic, mrv=50))
            except Exception:
                # BUG FIX: this was `pass`, which fell through and correlated
                # the previous id's stale data (or hit a NameError on the
                # first).  Unfetchable indicators are now skipped, and
                # remembered so the change pass stays aligned.
                skipped.add(indic)
                continue
            # NOTE(review): this function merges how='left' where its
            # siblings use 'inner' -- preserved as-is; confirm intended.
            merged = pd.merge(data, series_df, how='left', on=['Country', 'Year'])
            cor_i = merged.iloc[:, col].corr(merged.iloc[:, merged.shape[1] - 1])
            cors.append(cor_i)
            indicators.append(pd.DataFrame(wb.get_series(indic, mrv=1)).reset_index()['Series'][0])
            n_i = len(merged[merged.iloc[:, col].notnull()
                             & merged.iloc[:, merged.shape[1] - 1].notnull()])
            n.append(n_i)
            t.append(_t_stat(cor_i, n_i))
        if not change:
            if t_lim == 0:
                out = pd.DataFrame(list(zip(indicators, cors, n)),
                                   columns=['Indicator', 'Correlation', 'n']
                                   ).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
                return out.loc[(out.n > nlim)
                               & ((out.Correlation > cor_lim) | (out.Correlation < -cor_lim))].head(k)
            out = pd.DataFrame(list(zip(indicators, cors, n, t)),
                               columns=['Indicator', 'Correlation', 'n', 't']
                               ).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
            return out.loc[(out.n > nlim)
                           & ((out.Correlation > cor_lim) | (out.Correlation < -cor_lim))
                           & ((out.t > t_lim) | (out.t < -t_lim))].head(k)

        cors_chg, n_change, t_change = [], [], []
        # Input variable's percent changes are indicator-independent: once.
        data_pct = _pct_change_by_country(data, col, 'lag_dat', 'pct_chg_dat')
        for indic in inds['id']:
            if indic in skipped:
                continue  # keep this pass's lists aligned with the first pass
            try:
                series_df = pd.DataFrame(wb.get_series(indic, mrv=50)).reset_index()
            except Exception:
                # Fetch succeeded in pass one but failed here; append
                # placeholders so the zipped lists stay the same length.
                cors_chg.append(None)
                n_change.append(0)
                t_change.append(None)
                continue
            ind_pct = _pct_change_by_country(series_df, 3, 'lag_ind', 'pct_chg_ind')
            merged_pct = pd.merge(data_pct, ind_pct, how='inner', on=['Country', 'Year'])
            cor_chg_i = merged_pct['pct_chg_dat'].corr(merged_pct['pct_chg_ind'])
            cors_chg.append(cor_chg_i)
            n_chg_i = len(merged_pct[merged_pct['pct_chg_dat'].notnull()
                                     & merged_pct['pct_chg_ind'].notnull()])
            n_change.append(n_chg_i)
            t_change.append(_t_stat(cor_chg_i, n_chg_i))
        if t_lim == 0:
            out = pd.DataFrame(list(zip(indicators, cors, n, cors_chg, n_change)),
                               columns=['Indicator', 'Correlation', 'n', 'Correlation_change', 'n_change']
                               ).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
            return out.loc[(out.n_change > nlim)
                           & ((out.Correlation_change > cor_lim) | (out.Correlation_change < -cor_lim))].head(k)
        out = pd.DataFrame(list(zip(indicators, cors, n, t, cors_chg, n_change, t_change)),
                           columns=['Indicator', 'Correlation', 'n', 't', 'Correlation_change', 'n_change', 't_change']
                           ).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
        return out.loc[(out.n_change > nlim)
                       & ((out.Correlation_change > cor_lim) | (out.Correlation_change < -cor_lim))
                       & ((out.t_change > t_lim) | (out.t_change < -t_lim))].head(k)
    finally:
        pd.options.mode.chained_assignment = orig_value
def wb_every(data, col, k=5, change=False, nlim=1, cor_lim=0, t_lim=0):
    """
    Return the k World Bank indicators with the strongest relationship to an input variable.

    The relationship is either the correlation between the input variable and each
    World Bank indicator, or (when change=True) the correlation between their annual
    percent changes.

    Parameters
    ----------
    data: A pandas DataFrame containing a column of countries called "Country," a column
          of years called "Year," and a column of data for a variable.
    col: The integer index of the column in data holding your variable's values.
    k: An integer; the k indicators with the strongest relationship are returned.
    change: A Boolean. When True, correlations are computed on annual percent changes
            and used to order the results.
    nlim: An integer; minimum n for an indicator to be reported.
    cor_lim: A real number; minimum absolute correlation for an indicator to be reported.
    t_lim: A real number; minimum absolute t score for an indicator to be reported.

    Returns
    ----------
    Pandas DataFrame
        Indexed by indicator name, with the correlation against the input variable.
        If change is True, columns for the percent-change correlation and its n are
        added and used for ordering. At most k rows; columns depend on change, nlim,
        and t_lim.
    """
    from math import sqrt
    assert 'Country' in data.columns, "data must have a column containing countries called 'Country'"
    assert 'Year' in data.columns, "data must have a column containing years called 'Year'"
    assert type(col) == int, "col must be an integer of a column index that exists in data"
    assert col < data.shape[1], "col must be a column index belonging to data"
    assert type(change) == bool, "change must be a Boolean value (True or False)"
    assert type(k) == int, "k must be an integer"
    assert type(nlim) == int, "n must be an integer"
    assert (type(cor_lim) == float or type(cor_lim) == int), "cor_lim must be a real number"
    assert (type(t_lim) == float or type(t_lim) == int), "t_lim must be a real number"
    # Save and silence the chained-assignment warning; restored in the finally
    # block so the option is put back even though this function returns early.
    # (The original code assigned an undefined `orig_value` on an unreachable line.)
    orig_value = pd.options.mode.chained_assignment
    pd.options.mode.chained_assignment = None
    try:
        here_we_go = pd.read_xml(requests.get('http://api.worldbank.org/v2/indicator?per_page=20100').content)
        cors = []
        indicators = []
        n = []
        t_scores = []   # renamed from `t`: the original clobbered this list with a DataFrame slice below
        ok_ids = []     # indicator ids that were fetched successfully, keeps all lists aligned
        for indic in here_we_go['id']:
            try:
                thing = pd.DataFrame(wb.get_series(indic, mrv=50)).reset_index()
            except Exception:
                # Skip indicators that cannot be fetched instead of silently
                # reusing the previous iteration's `thing` (original bug).
                continue
            merged = pd.merge(data, thing, how='left', on=['Country', 'Year'])
            last = merged.shape[1] - 1  # the freshly-merged indicator column
            n_i = len(merged[merged.iloc[:, col].notnull() & merged.iloc[:, last].notnull()])
            n.append(n_i)
            cor_i = merged.iloc[:, col].corr(merged.iloc[:, last])
            cors.append(cor_i)
            if cor_i == 1 or cor_i == -1:  # avoid division by 0
                t_scores.append(None)
            else:
                t_scores.append(cor_i * sqrt((n_i - 2) / (1 - (cor_i * cor_i))))
            indicators.append(thing.loc[0, 'Series'])
            ok_ids.append(indic)
        if change == False:
            if t_lim == 0:
                almost_there = pd.DataFrame(list(zip(indicators, cors, n)), columns=['Indicator', 'Correlation', 'n']).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
                return almost_there.loc[(almost_there.n > nlim) & ((almost_there.Correlation > cor_lim) | (almost_there.Correlation < -cor_lim))].head(k)
            almost_there = pd.DataFrame(list(zip(indicators, cors, n, t_scores)), columns=['Indicator', 'Correlation', 'n', 't']).sort_values(by='Correlation', key=abs, ascending=False).set_index('Indicator')
            return almost_there.loc[(almost_there.n > nlim) & ((almost_there.Correlation > cor_lim) | (almost_there.Correlation < -cor_lim)) & ((almost_there.t > t_lim) | (almost_there.t < -t_lim))].head(k)
        # change == True: correlate annual percent changes.
        cors_change = []
        n_change = []
        t_change = []
        mumbo = pd.DataFrame()  # percent-change data for the input variable
        for country in data['Country'].unique():
            s = data[data['Country'] == country]
            s.loc[:, 'lag_dat'] = s.iloc[:, col].shift(-1)
            s.loc[:, 'pct_chg_dat'] = (((s.iloc[:, col] - s['lag_dat']) / s['lag_dat']) * 100)
            mumbo = pd.concat([mumbo, s])
        # Iterate only the indicators that succeeded above so the change lists
        # stay aligned with `indicators`/`cors`/`n` when zipped together.
        for indic in ok_ids:
            jumbo = pd.DataFrame()  # percent-change data for this World Bank indicator
            try:
                thing = pd.DataFrame(wb.get_series(indic, mrv=50)).reset_index()
            except Exception:
                # Fetch worked earlier but failed now: record placeholders so
                # list lengths still match for the final zip.
                cors_change.append(None)
                n_change.append(0)
                t_change.append(None)
                continue
            for country in thing['Country'].unique():
                j = thing[thing['Country'] == country]
                j.loc[:, 'lag_ind'] = j.iloc[:, 3].shift(-1)
                j.loc[:, 'pct_chg_ind'] = (((j.iloc[:, 3] - j['lag_ind']) / j['lag_ind']) * 100)
                jumbo = pd.concat([jumbo, j])
            merged_pct = pd.merge(mumbo, jumbo, how='left', on=['Country', 'Year'])
            cor_chg_i = merged_pct.loc[:, 'pct_chg_dat'].corr(merged_pct.loc[:, 'pct_chg_ind'])
            cors_change.append(cor_chg_i)
            n_chg_i = len(merged_pct[merged_pct.loc[:, 'pct_chg_dat'].notnull() & merged_pct.loc[:, 'pct_chg_ind'].notnull()])
            n_change.append(n_chg_i)
            if (cor_chg_i == 1 or cor_chg_i == -1):  # avoid division by 0
                t_change.append(None)
            else:
                t_change.append(cor_chg_i * sqrt((n_chg_i - 2) / (1 - (cor_chg_i * cor_chg_i))))
        if t_lim == 0:
            almost_there = pd.DataFrame(list(zip(indicators, cors, n, cors_change, n_change)), columns=['Indicator', 'Correlation', 'n', 'Correlation_change', 'n_change']).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
            return almost_there.loc[(almost_there.n_change > nlim) & ((almost_there.Correlation_change > cor_lim) | (almost_there.Correlation_change < -cor_lim))].head(k)
        almost_there = pd.DataFrame(list(zip(indicators, cors, n, t_scores, cors_change, n_change, t_change)), columns=['Indicator', 'Correlation', 'n', 't', 'Correlation_change', 'n_change', 't_change']).sort_values(by='Correlation_change', key=abs, ascending=False).set_index('Indicator')
        return almost_there.loc[(almost_there.n_change > nlim) & ((almost_there.Correlation_change > cor_lim) | (almost_there.Correlation_change < -cor_lim)) & ((almost_there.t_change > t_lim) | (almost_there.t_change < (-t_lim)))].head(k)
    finally:
        # Always restore the pandas option, even on early return or error.
        pd.options.mode.chained_assignment = orig_value
| 75.978764
| 442
| 0.657647
| 5,810
| 39,357
| 4.336145
| 0.073322
| 0.031437
| 0.010717
| 0.021871
| 0.864327
| 0.847219
| 0.838804
| 0.804271
| 0.801453
| 0.798079
| 0
| 0.025438
| 0.202912
| 39,357
| 517
| 443
| 76.125725
| 0.777629
| 0.055594
| 0
| 0.722689
| 0
| 0.008403
| 0.239518
| 0
| 0
| 0
| 0
| 0
| 0.106443
| 1
| 0.011204
| false
| 0.014006
| 0.019608
| 0
| 0.07563
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
01b18aad97e402b9cba88203c46d02b4787436f6
| 77
|
py
|
Python
|
numerous/engine/model/graph_representation/__init__.py
|
amipy/numerous
|
46eb3806ad904c3c0b3dccad7f8f3ddb5582cbd9
|
[
"BSD-3-Clause"
] | 20
|
2019-12-11T18:19:39.000Z
|
2022-01-30T15:37:58.000Z
|
numerous/engine/model/graph_representation/__init__.py
|
amipy/numerous
|
46eb3806ad904c3c0b3dccad7f8f3ddb5582cbd9
|
[
"BSD-3-Clause"
] | 38
|
2020-04-11T22:25:58.000Z
|
2022-03-29T12:24:15.000Z
|
numerous/engine/model/graph_representation/__init__.py
|
amipy/numerous
|
46eb3806ad904c3c0b3dccad7f8f3ddb5582cbd9
|
[
"BSD-3-Clause"
] | 7
|
2019-12-21T12:12:09.000Z
|
2021-12-02T14:12:09.000Z
|
from .mappings_graph import *
from .lower_graph import *
from .graph import *
| 25.666667
| 29
| 0.779221
| 11
| 77
| 5.272727
| 0.454545
| 0.568966
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 77
| 3
| 30
| 25.666667
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bf0c0c7b3f5e733b6958635fcc95fb027afb9898
| 7,991
|
py
|
Python
|
riak/tests/test_feature_detection.py
|
lixen/riak-python-client
|
43c0e2d43b185180fe8dd02ca759fbfb54bfec4b
|
[
"Apache-2.0"
] | 89
|
2015-01-06T01:54:57.000Z
|
2020-11-25T04:47:09.000Z
|
riak/tests/test_feature_detection.py
|
lixen/riak-python-client
|
43c0e2d43b185180fe8dd02ca759fbfb54bfec4b
|
[
"Apache-2.0"
] | 125
|
2015-01-05T09:32:37.000Z
|
2021-06-27T21:28:51.000Z
|
riak/tests/test_feature_detection.py
|
lixen/riak-python-client
|
43c0e2d43b185180fe8dd02ca759fbfb54bfec4b
|
[
"Apache-2.0"
] | 73
|
2015-01-19T18:04:35.000Z
|
2022-03-25T17:10:51.000Z
|
# Copyright 2010-present Basho Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
import unittest
from riak.transports.feature_detect import FeatureDetection
class IncompleteTransport(FeatureDetection):
    """A FeatureDetection subclass that deliberately omits _server_version().

    Used to verify that accessing server_version without an implementation
    raises NotImplementedError.
    """
class DummyTransport(FeatureDetection):
    """A FeatureDetection stub that reports a fixed, caller-supplied server version."""

    def __init__(self, version):
        # Remember the version string that _server_version() will report.
        self._version = version

    def _server_version(self):
        """Return the version string given at construction time."""
        return self._version
class FeatureDetectionTest(unittest.TestCase):
    """Verify FeatureDetection enables each capability at the correct server version.

    Every test_* method pins one Riak version string and checks all known
    feature predicates against the expected set for that version. The
    expected sets are cumulative: each release keeps the previous release's
    features and adds new ones. This replaces ~150 lines of copy-pasted
    assertTrue/assertFalse calls while keeping every original test name.
    """

    # All feature-predicate method names exposed by FeatureDetection,
    # in the order the original tests asserted them.
    FEATURES = (
        "phaseless_mapred", "pb_indexes", "pb_search", "pb_conditionals",
        "quorum_controls", "tombstone_vclocks", "pb_head",
        "pb_clear_bucket_props", "pb_all_bucket_props", "counters",
        "stream_indexes", "index_term_regex", "bucket_types", "datatypes",
        "preflists", "write_once",
    )

    # Cumulative feature sets per release line.
    _FEATURES_10 = frozenset({
        "pb_conditionals", "quorum_controls", "tombstone_vclocks", "pb_head",
    })
    _FEATURES_11 = _FEATURES_10 | {"phaseless_mapred"}
    _FEATURES_12 = _FEATURES_11 | {"pb_indexes", "pb_search"}
    _FEATURES_14 = _FEATURES_12 | {
        "pb_clear_bucket_props", "pb_all_bucket_props", "counters",
        "stream_indexes",
    }
    _FEATURES_144 = _FEATURES_14 | {"index_term_regex"}
    _FEATURES_20 = _FEATURES_144 | {"bucket_types", "datatypes"}
    _FEATURES_21 = _FEATURES_20 | {"preflists", "write_once"}

    def _assert_features(self, version, expected_true):
        """Assert that exactly *expected_true* features are enabled at *version*."""
        t = DummyTransport(version)
        for name in self.FEATURES:
            enabled = getattr(t, name)()
            if name in expected_true:
                self.assertTrue(enabled, "%s should be enabled at %s" % (name, version))
            else:
                self.assertFalse(enabled, "%s should be disabled at %s" % (name, version))

    def test_implements_server_version(self):
        # A transport that does not implement _server_version() must raise
        # when the server_version property is accessed.
        t = IncompleteTransport()
        with self.assertRaises(NotImplementedError):
            t.server_version

    def test_pre_10(self):
        # Pre-1.0 servers support none of the tracked features.
        self._assert_features("0.14.2", frozenset())

    def test_10(self):
        self._assert_features("1.0.3", self._FEATURES_10)

    def test_11(self):
        self._assert_features("1.1.4", self._FEATURES_11)

    def test_12(self):
        self._assert_features("1.2.0", self._FEATURES_12)

    def test_12_loose(self):
        # A patch-suffixed version string ("p3") must parse like its base version.
        self._assert_features("1.2.1p3", self._FEATURES_12)

    def test_14(self):
        # Release-candidate suffixes ("rc1") must also parse loosely.
        self._assert_features("1.4.0rc1", self._FEATURES_14)

    def test_144(self):
        self._assert_features("1.4.6", self._FEATURES_144)

    def test_20(self):
        self._assert_features("2.0.1", self._FEATURES_20)

    def test_21(self):
        self._assert_features("2.1.0", self._FEATURES_21)
# Allow running this test module directly: python test_feature_detection.py
if __name__ == '__main__':
    unittest.main()
| 37.341121
| 74
| 0.659742
| 970
| 7,991
| 5.235052
| 0.141237
| 0.209531
| 0.224498
| 0.12052
| 0.773533
| 0.756991
| 0.754234
| 0.754234
| 0.754234
| 0.754234
| 0
| 0.009183
| 0.209611
| 7,991
| 213
| 75
| 37.516432
| 0.794807
| 0.073958
| 0
| 0.775281
| 0
| 0
| 0.007989
| 0
| 0
| 0
| 0
| 0
| 0.814607
| 1
| 0.067416
| false
| 0.005618
| 0.011236
| 0.005618
| 0.101124
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bf26cb8f61884cd7b936fb9dd70d968e32c92a22
| 9,417
|
py
|
Python
|
loldib/getpros/views.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getpros/views.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getpros/views.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from getpros.models import NA_Pros #, EUW_Pros, EUNE_Pros, KR_Pros, JP_Pros, BR_Pros, RU_Pros, LAN_Pros, LAS_Pros, OCE_Pros, TR_Pros
import cassiopeia as cass
def index(request):
    """Refresh the NA_Pros table from Riot's NA challenger and master ladders,
    then render the header page.

    Parameters
    ----------
    request: The Django HttpRequest for this view.

    Returns
    ----------
    HttpResponse rendering 'getpros/header.html'.
    """
    # SECURITY: the Riot API key is hard-coded and committed to source control.
    # It should be moved to Django settings / an environment variable and rotated.
    cass.set_riot_api_key("RGAPI-daa909e6-146f-4c9d-a03a-2ff08bac37be")
    cass.set_default_region("NA")

    def _sync(players):
        # Upsert each summoner into NA_Pros, keyed on the immutable summoner ID
        # so renamed summoners update in place instead of duplicating rows.
        for p in players:
            obj, created = NA_Pros.objects.update_or_create(
                summoner_ID=p.summoner.id,
                defaults={'summoner_name': p.summoner.name},
            )
            if created:
                print('Created entry: ' + obj.summoner_name)
            else:
                print('Found entry: ' + obj.summoner_name)

    # NOTE: the original file carried ~120 lines of commented-out copies of this
    # logic for other regions (EUW, EUNE, KR, JP, BR, RU, LAN, LAS, OCE, TR).
    # To support them, loop over (region, model) pairs and pass region= to the
    # cassiopeia calls instead of duplicating this code per region.
    _sync(cass.get_challenger_league(queue='RANKED_SOLO_5x5'))
    _sync(cass.get_master_league(queue='RANKED_SOLO_5x5'))
    return render(request, 'getpros/header.html')
| 56.389222
| 133
| 0.660401
| 1,222
| 9,417
| 4.806056
| 0.054828
| 0.179806
| 0.119871
| 0.149838
| 0.922527
| 0.922527
| 0.921505
| 0.921505
| 0.9055
| 0.9055
| 0
| 0.008106
| 0.213975
| 9,417
| 166
| 134
| 56.728916
| 0.785328
| 0.841457
| 0
| 0.47619
| 0
| 0
| 0.147679
| 0.035443
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.142857
| 0
| 0.238095
| 0.190476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17223690c76f0643e698b26a944ff009c13a2e28
| 165
|
py
|
Python
|
anchore_engine/clients/__init__.py
|
dspalmer99/anchore-engine
|
8c61318be6fec5d767426fa4ccd98472cc85b5cd
|
[
"Apache-2.0"
] | 1,484
|
2017-09-11T19:08:42.000Z
|
2022-03-29T07:47:44.000Z
|
anchore_engine/clients/__init__.py
|
dspalmer99/anchore-engine
|
8c61318be6fec5d767426fa4ccd98472cc85b5cd
|
[
"Apache-2.0"
] | 913
|
2017-09-27T20:37:53.000Z
|
2022-03-29T17:21:28.000Z
|
anchore_engine/clients/__init__.py
|
dspalmer99/anchore-engine
|
8c61318be6fec5d767426fa4ccd98472cc85b5cd
|
[
"Apache-2.0"
] | 294
|
2017-09-12T16:54:03.000Z
|
2022-03-14T01:28:51.000Z
|
import requests.packages
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# Globally silence urllib3's warning about unverified HTTPS requests for the
# whole process. NOTE(review): this is a module-level side effect triggered by
# importing this package; it hides the warning even for callers that would
# want certificate verification problems surfaced.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
| 33
| 71
| 0.89697
| 16
| 165
| 9.1875
| 0.5625
| 0.326531
| 0.312925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012739
| 0.048485
| 165
| 4
| 72
| 41.25
| 0.923567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
172a110a1f59742600edf93a4b58a61032c0a3e3
| 204
|
py
|
Python
|
ttp/match/to.py
|
tobiasjohanssonsdnit/ttp
|
0582cc82c07f059d489cc20af5109b027ada8f11
|
[
"MIT"
] | null | null | null |
ttp/match/to.py
|
tobiasjohanssonsdnit/ttp
|
0582cc82c07f059d489cc20af5109b027ada8f11
|
[
"MIT"
] | null | null | null |
ttp/match/to.py
|
tobiasjohanssonsdnit/ttp
|
0582cc82c07f059d489cc20af5109b027ada8f11
|
[
"MIT"
] | null | null | null |
def to_str(data):
    """Coerce *data* to its string representation.

    Returns a (value, flag) pair where flag is always None.
    """
    converted = str(data)
    return (converted, None)
def to_list(data):
    """Wrap *data* in a single-element list.

    Returns a (value, flag) pair where flag is always None.
    """
    wrapped = [data]
    return (wrapped, None)
def to_int(data):
    """Attempt to convert *data* to an int.

    Returns (int(data), None) on success. On failure the original value is
    returned unchanged as (data, None) instead of raising.
    """
    try:
        return int(data), None
    except (ValueError, TypeError):
        # Non-numeric strings raise ValueError, but non-convertible objects
        # such as None or lists raise TypeError; the original code only
        # caught ValueError and so crashed on those inputs. Both now mean
        # "leave the value unchanged", matching the function's contract.
        return data, None
| 18.545455
| 30
| 0.593137
| 29
| 204
| 4.068966
| 0.37931
| 0.271186
| 0.186441
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.303922
| 204
| 11
| 31
| 18.545455
| 0.830986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.222222
| 0.777778
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
177889e13306a59182e8e0d00028314ee1176015
| 16,952
|
py
|
Python
|
google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py
|
tswast/python-datastore
|
ddbbb03fc42bf53e698b3869a660a9938b3339e8
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py
|
tswast/python-datastore
|
ddbbb03fc42bf53e698b3869a660a9938b3339e8
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py
|
tswast/python-datastore
|
ddbbb03fc42bf53e698b3869a660a9938b3339e8
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.datastore_admin_v1.proto import (
datastore_admin_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2,
)
from google.cloud.datastore_admin_v1.proto import (
index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2,
)
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
class DatastoreAdminStub(object):
    """Google Cloud Datastore Admin API
    The Datastore Admin API provides several admin services for Cloud Datastore.
    -----------------------------------------------------------------------------
    ## Concepts
    Project, namespace, kind, and entity as defined in the Google Cloud Datastore
    API.
    Operation: An Operation represents work being performed in the background.
    EntityFilter: Allows specifying a subset of entities in a project. This is
    specified as a combination of kinds and namespaces (either or both of which
    may be all).
    -----------------------------------------------------------------------------
    ## Services
    # Export/Import
    The Export/Import service provides the ability to copy all or a subset of
    entities to/from Google Cloud Storage.
    Exported data may be imported into Cloud Datastore for any Google Cloud
    Platform project. It is not restricted to the export source project. It is
    possible to export from one project and then import into another.
    Exported data can also be loaded into Google BigQuery for analysis.
    Exports and imports are performed asynchronously. An Operation resource is
    created for each export/import. The state (including any errors encountered)
    of the export/import may be queried via the Operation resource.
    # Index
    The index service manages Cloud Datastore composite indexes.
    Index creation and deletion are performed asynchronously.
    An Operation resource is created for each such asynchronous operation.
    The state of the operation (including any errors encountered)
    may be queried via the Operation resource.
    # Operation
    The Operations collection provides a record of actions performed for the
    specified project (including any operations in progress). Operations are not
    created directly but through calls on other collections or resources.
    An operation that is not yet done may be cancelled. The request to cancel is
    asynchronous and the operation may continue to run for some time after the
    request to cancel is made.
    An operation that is done may be deleted so that it is no longer listed as
    part of the Operation collection.
    ListOperations returns all pending operations, but not completed operations.
    Operations are created by service DatastoreAdmin,
    but are accessed via service google.longrunning.Operations.
    """

    def __init__(self, channel):
        """Constructor.
        Args:
        channel: A grpc.Channel.
        """
        # NOTE: this class is emitted by the gRPC Python protocol compiler (see
        # the "DO NOT EDIT" header at the top of the file); hand edits will be
        # lost on regeneration. Each attribute below is a unary-unary RPC
        # callable bound to one DatastoreAdmin service method.
        self.ExportEntities = channel.unary_unary(
            "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities",
            request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.ImportEntities = channel.unary_unary(
            "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities",
            request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.GetIndex = channel.unary_unary(
            "/google.datastore.admin.v1.DatastoreAdmin/GetIndex",
            request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
        )
        self.ListIndexes = channel.unary_unary(
            "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes",
            request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString,
        )
class DatastoreAdminServicer(object):
"""Google Cloud Datastore Admin API
The Datastore Admin API provides several admin services for Cloud Datastore.
-----------------------------------------------------------------------------
## Concepts
Project, namespace, kind, and entity as defined in the Google Cloud Datastore
API.
Operation: An Operation represents work being performed in the background.
EntityFilter: Allows specifying a subset of entities in a project. This is
specified as a combination of kinds and namespaces (either or both of which
may be all).
-----------------------------------------------------------------------------
## Services
# Export/Import
The Export/Import service provides the ability to copy all or a subset of
entities to/from Google Cloud Storage.
Exported data may be imported into Cloud Datastore for any Google Cloud
Platform project. It is not restricted to the export source project. It is
possible to export from one project and then import into another.
Exported data can also be loaded into Google BigQuery for analysis.
Exports and imports are performed asynchronously. An Operation resource is
created for each export/import. The state (including any errors encountered)
of the export/import may be queried via the Operation resource.
# Index
The index service manages Cloud Datastore composite indexes.
Index creation and deletion are performed asynchronously.
An Operation resource is created for each such asynchronous operation.
The state of the operation (including any errors encountered)
may be queried via the Operation resource.
# Operation
The Operations collection provides a record of actions performed for the
specified project (including any operations in progress). Operations are not
created directly but through calls on other collections or resources.
An operation that is not yet done may be cancelled. The request to cancel is
asynchronous and the operation may continue to run for some time after the
request to cancel is made.
An operation that is done may be deleted so that it is no longer listed as
part of the Operation collection.
ListOperations returns all pending operations, but not completed operations.
Operations are created by service DatastoreAdmin,
but are accessed via service google.longrunning.Operations.
"""
def ExportEntities(self, request, context):
"""Exports a copy of all or a subset of entities from Google Cloud Datastore
to another storage system, such as Google Cloud Storage. Recent updates to
entities may not be reflected in the export. The export occurs in the
background and its progress can be monitored and managed via the
Operation resource that is created. The output of an export may only be
used once the associated operation is done. If an export operation is
cancelled before completion it may leave partial data behind in Google
Cloud Storage.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ImportEntities(self, request, context):
"""Imports entities into Google Cloud Datastore. Existing entities with the
same key are overwritten. The import occurs in the background and its
progress can be monitored and managed via the Operation resource that is
created. If an ImportEntities operation is cancelled, it is possible
that a subset of the data has already been imported to Cloud Datastore.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetIndex(self, request, context):
"""Gets an index.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListIndexes(self, request, context):
"""Lists the indexes that match the specified filters. Datastore uses an
eventually consistent query to fetch the list of indexes and may
occasionally return stale results.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_DatastoreAdminServicer_to_server(servicer, server):
rpc_method_handlers = {
"ExportEntities": grpc.unary_unary_rpc_method_handler(
servicer.ExportEntities,
request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"ImportEntities": grpc.unary_unary_rpc_method_handler(
servicer.ImportEntities,
request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
"GetIndex": grpc.unary_unary_rpc_method_handler(
servicer.GetIndex,
request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.FromString,
response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString,
),
"ListIndexes": grpc.unary_unary_rpc_method_handler(
servicer.ListIndexes,
request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.FromString,
response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.datastore.admin.v1.DatastoreAdmin", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DatastoreAdmin(object):
"""Google Cloud Datastore Admin API
The Datastore Admin API provides several admin services for Cloud Datastore.
-----------------------------------------------------------------------------
## Concepts
Project, namespace, kind, and entity as defined in the Google Cloud Datastore
API.
Operation: An Operation represents work being performed in the background.
EntityFilter: Allows specifying a subset of entities in a project. This is
specified as a combination of kinds and namespaces (either or both of which
may be all).
-----------------------------------------------------------------------------
## Services
# Export/Import
The Export/Import service provides the ability to copy all or a subset of
entities to/from Google Cloud Storage.
Exported data may be imported into Cloud Datastore for any Google Cloud
Platform project. It is not restricted to the export source project. It is
possible to export from one project and then import into another.
Exported data can also be loaded into Google BigQuery for analysis.
Exports and imports are performed asynchronously. An Operation resource is
created for each export/import. The state (including any errors encountered)
of the export/import may be queried via the Operation resource.
# Index
The index service manages Cloud Datastore composite indexes.
Index creation and deletion are performed asynchronously.
An Operation resource is created for each such asynchronous operation.
The state of the operation (including any errors encountered)
may be queried via the Operation resource.
# Operation
The Operations collection provides a record of actions performed for the
specified project (including any operations in progress). Operations are not
created directly but through calls on other collections or resources.
An operation that is not yet done may be cancelled. The request to cancel is
asynchronous and the operation may continue to run for some time after the
request to cancel is made.
An operation that is done may be deleted so that it is no longer listed as
part of the Operation collection.
ListOperations returns all pending operations, but not completed operations.
Operations are created by service DatastoreAdmin,
but are accessed via service google.longrunning.Operations.
"""
@staticmethod
def ExportEntities(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.datastore.admin.v1.DatastoreAdmin/ExportEntities",
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ImportEntities(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.datastore.admin.v1.DatastoreAdmin/ImportEntities",
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString,
google_dot_longrunning_dot_operations__pb2.Operation.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def GetIndex(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.datastore.admin.v1.DatastoreAdmin/GetIndex",
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString,
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
@staticmethod
def ListIndexes(
request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None,
):
return grpc.experimental.unary_unary(
request,
target,
"/google.datastore.admin.v1.DatastoreAdmin/ListIndexes",
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString,
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString,
options,
channel_credentials,
call_credentials,
compression,
wait_for_ready,
timeout,
metadata,
)
| 40.848193
| 149
| 0.70735
| 1,978
| 16,952
| 5.82912
| 0.123357
| 0.065568
| 0.053079
| 0.029488
| 0.867823
| 0.864701
| 0.851518
| 0.838335
| 0.812663
| 0.812663
| 0
| 0.004626
| 0.222216
| 16,952
| 414
| 150
| 40.94686
| 0.869852
| 0.477879
| 0
| 0.640884
| 1
| 0
| 0.085854
| 0.057563
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055249
| false
| 0
| 0.077348
| 0.022099
| 0.171271
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17963a3208c6a0556b7b03c34314cf11533d148c
| 9,956
|
py
|
Python
|
tests/application/data/test_dimensions.py
|
AlexKouzy/ethnicity-facts-and-figures-publisher
|
18ab2495a8633f585e18e607c7f75daa564a053d
|
[
"MIT"
] | 1
|
2021-10-06T13:48:36.000Z
|
2021-10-06T13:48:36.000Z
|
tests/application/data/test_dimensions.py
|
AlexKouzy/ethnicity-facts-and-figures-publisher
|
18ab2495a8633f585e18e607c7f75daa564a053d
|
[
"MIT"
] | 116
|
2018-11-02T17:20:47.000Z
|
2022-02-09T11:06:22.000Z
|
tests/application/data/test_dimensions.py
|
racedisparityaudit/rd_cms
|
a12f0e3f5461cc41eed0077ed02e11efafc5dd76
|
[
"MIT"
] | 2
|
2018-11-09T16:47:35.000Z
|
2020-04-09T13:06:48.000Z
|
from flask import url_for
from application.data.dimensions import DimensionObjectBuilder
from application.utils import write_dimension_csv
from application.cms.models import UKCountry
from tests.models import MeasureVersionWithDimensionFactory, ClassificationFactory, DataSourceFactory
from tests.test_data.chart_and_table import simple_table, grouped_table
def test_table_object_builder_does_build_object_from_simple_table():
measure_version = MeasureVersionWithDimensionFactory(dimensions__dimension_table__table_object=simple_table())
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
table_object = builder.build(dimension)
# then the header for the returned table should match the ones from the simple table
assert table_object is not None
assert table_object.get("table").get("title") == "Title of simple table"
def test_table_object_builder_does_build_object_from_grouped_table():
measure_version = MeasureVersionWithDimensionFactory(dimensions__dimension_table__table_object=grouped_table())
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
table_object = builder.build(dimension)
# then the header for the returned table should match the ones from the grouped table
assert table_object is not None
assert table_object.get("table").get("title") == "Title of grouped table"
def test_table_object_builder_does_build_with_page_level_data_from_simple_table():
data_source = DataSourceFactory.build(
title="DWP Stats", source_url="http://dwp.gov.uk", publisher__name="Department for Work and Pensions"
)
measure_version = MeasureVersionWithDimensionFactory(
title="Test Measure Page",
area_covered=[UKCountry.ENGLAND],
data_sources=[data_source],
dimensions__dimension_table__table_object=simple_table(),
measure__slug="test-measure-page-slug",
)
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
dimension_object = builder.build(dimension)
# then the measure level info should be brought through
assert dimension_object["context"]["measure"] == "Test Measure Page"
assert dimension_object["context"]["measure_slug"] == "test-measure-page-slug"
assert dimension_object["context"]["location"] == "England"
assert dimension_object["context"]["title"] == "DWP Stats"
assert dimension_object["context"]["source_url"] == "http://dwp.gov.uk"
assert dimension_object["context"]["publisher"] == "Department for Work and Pensions"
def test_dimension_object_builder_does_build_with_page_level_data_from_grouped_table():
data_source = DataSourceFactory.build(
title="DWP Stats", source_url="http://dwp.gov.uk", publisher__name="Department for Work and Pensions"
)
measure_version = MeasureVersionWithDimensionFactory(
title="Test Measure Page",
area_covered=[UKCountry.ENGLAND],
data_sources=[data_source],
dimensions__dimension_table__table_object=grouped_table(),
measure__slug="test-measure-page-slug",
)
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
dimension_object = builder.build(dimension)
# then the measure level info should be brought through
assert dimension_object["context"]["measure"] == "Test Measure Page"
assert dimension_object["context"]["measure_slug"] == "test-measure-page-slug"
assert dimension_object["context"]["location"] == "England"
assert dimension_object["context"]["title"] == "DWP Stats"
assert dimension_object["context"]["source_url"] == "http://dwp.gov.uk"
assert dimension_object["context"]["publisher"] == "Department for Work and Pensions"
def test_table_object_builder_does_build_with_dimension_level_data_from_simple_table():
measure_version = MeasureVersionWithDimensionFactory(
title="Test Measure Page",
area_covered=[UKCountry.ENGLAND],
dimensions__title="Dimension title",
dimensions__guid="dimension-guid",
dimensions__time_period="dimension-time-period",
dimensions__dimension_table__table_object=simple_table(),
measure__slug="test-measure-page-slug",
)
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
dimension_object = builder.build(dimension)
# then the dimension level info should be brought through
assert dimension_object["context"]["dimension"] == "Dimension title"
assert dimension_object["context"]["guid"] == "dimension-guid"
assert dimension_object["context"]["time_period"] == "dimension-time-period"
def test_table_object_builder_does_build_with_dimension_level_data_from_grouped_table():
measure_version = MeasureVersionWithDimensionFactory(
dimensions__title="Dimension title",
dimensions__guid="dimension-guid",
dimensions__time_period="dimension-time-period",
dimensions__dimension_table__table_object=grouped_table(),
)
# given - a table without a category_caption value
builder = DimensionObjectBuilder()
dimension = measure_version.dimensions[0]
# when we process the object
dimension_object = builder.build(dimension)
# then the dimension level info should be brought through
assert dimension_object["context"]["dimension"] == "Dimension title"
assert dimension_object["context"]["guid"] == "dimension-guid"
assert dimension_object["context"]["time_period"] == "dimension-time-period"
def test_if_dimension_has_chart_download_chart_source_data(logged_in_rdu_user, test_app_client):
from tests.test_data.chart_and_table import chart, chart_settings_and_source_data
measure_version = MeasureVersionWithDimensionFactory(
dimensions__title="Dimension title",
dimensions__dimension_chart__chart_object=chart,
dimensions__dimension_chart__settings_and_source_data=chart_settings_and_source_data,
dimensions__dimension_chart__classification=ClassificationFactory(id="2A"),
dimensions__dimension_chart__includes_parents=False,
dimensions__dimension_chart__includes_all=True,
dimensions__dimension_chart__includes_unknown=False,
# No table
dimensions__dimension_table=None,
)
# GIVEN
# we have a dimension with only chart data
dimension = measure_version.dimensions[0]
resp = test_app_client.get(
url_for(
"static_site.dimension_file_download",
topic_slug=measure_version.measure.subtopic.topic.slug,
subtopic_slug=measure_version.measure.subtopic.slug,
measure_slug=measure_version.measure.slug,
version=measure_version.version,
dimension_guid=dimension.guid,
)
)
d = DimensionObjectBuilder.build(dimension)
# WHEN
# we generate a plain table csv
expected_csv = write_dimension_csv(dimension=d)
# THEN
# we get a return
assert resp.status_code == 200
assert resp.content_type == "text/csv"
assert resp.headers["Content-Disposition"] == 'attachment; filename="dimension-title.csv"'
# from the data in the chart
actual_data = resp.data.decode("utf-8")
assert actual_data == expected_csv
def test_if_dimension_has_chart_and_table_download_table_source_data(logged_in_rdu_user, test_app_client):
from tests.test_data.chart_and_table import (
chart,
chart_settings_and_source_data,
table,
table_settings_and_source_data,
)
measure_version = MeasureVersionWithDimensionFactory(
dimensions__title="Dimension title",
# Chart
dimensions__dimension_chart__chart_object=chart,
dimensions__dimension_chart__settings_and_source_data=chart_settings_and_source_data,
dimensions__dimension_chart__classification=ClassificationFactory(id="2A"),
dimensions__dimension_chart__includes_parents=False,
dimensions__dimension_chart__includes_all=True,
dimensions__dimension_chart__includes_unknown=False,
# Table
dimensions__dimension_table__table_object=table,
dimensions__dimension_table__settings_and_source_data=table_settings_and_source_data,
dimensions__dimension_table__classification=ClassificationFactory(id="5A"),
dimensions__dimension_table__includes_parents=True,
dimensions__dimension_table__includes_all=False,
dimensions__dimension_table__includes_unknown=True,
)
# GIVEN
# we have a dimension with table and chart data
dimension = measure_version.dimensions[0]
resp = test_app_client.get(
url_for(
"static_site.dimension_file_download",
topic_slug=measure_version.measure.subtopic.topic.slug,
subtopic_slug=measure_version.measure.subtopic.slug,
measure_slug=measure_version.measure.slug,
version=measure_version.version,
dimension_guid=dimension.guid,
)
)
# WHEN
# we generate a plain table csv
d = DimensionObjectBuilder.build(dimension)
expected_csv = write_dimension_csv(dimension=d)
# THEN
# we get a return
assert resp.status_code == 200
assert resp.content_type == "text/csv"
assert resp.headers["Content-Disposition"] == 'attachment; filename="dimension-title.csv"'
# from the data in the table (not chart)
actual_data = resp.data.decode("utf-8")
assert actual_data == expected_csv
| 42.186441
| 115
| 0.744677
| 1,155
| 9,956
| 6.049351
| 0.115152
| 0.067983
| 0.0541
| 0.072134
| 0.905682
| 0.892944
| 0.871189
| 0.858595
| 0.846572
| 0.809646
| 0
| 0.00231
| 0.173765
| 9,956
| 235
| 116
| 42.365957
| 0.84707
| 0.114504
| 0
| 0.72327
| 0
| 0
| 0.140547
| 0.036902
| 0
| 0
| 0
| 0
| 0.188679
| 1
| 0.050314
| false
| 0
| 0.050314
| 0
| 0.100629
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
179b6a639c861641a0c4555fbbd82c5c5c9b6703
| 36,167
|
py
|
Python
|
sdk/python/pulumi_cloudamqp/integration_metric.py
|
pulumi/pulumi-cloudamqp
|
1d411fb0076c257b51a6b133aaedb9292efa2373
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-09-23T11:53:33.000Z
|
2021-12-01T20:56:35.000Z
|
sdk/python/pulumi_cloudamqp/integration_metric.py
|
pulumi/pulumi-cloudamqp
|
1d411fb0076c257b51a6b133aaedb9292efa2373
|
[
"ECL-2.0",
"Apache-2.0"
] | 53
|
2019-12-09T20:12:27.000Z
|
2022-03-31T15:21:00.000Z
|
sdk/python/pulumi_cloudamqp/integration_metric.py
|
pulumi/pulumi-cloudamqp
|
1d411fb0076c257b51a6b133aaedb9292efa2373
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-12-11T09:29:16.000Z
|
2019-12-11T09:29:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['IntegrationMetricArgs', 'IntegrationMetric']
@pulumi.input_type
class IntegrationMetricArgs:
def __init__(__self__, *,
instance_id: pulumi.Input[int],
access_key_id: Optional[pulumi.Input[str]] = None,
api_key: Optional[pulumi.Input[str]] = None,
client_email: Optional[pulumi.Input[str]] = None,
email: Optional[pulumi.Input[str]] = None,
license_key: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_key: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
queue_allowlist: Optional[pulumi.Input[str]] = None,
queue_whitelist: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
secret_access_key: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[str]] = None,
vhost_allowlist: Optional[pulumi.Input[str]] = None,
vhost_whitelist: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a IntegrationMetric resource.
:param pulumi.Input[int] instance_id: Instance identifier
:param pulumi.Input[str] access_key_id: AWS access key identifier. (Cloudwatch)
:param pulumi.Input[str] api_key: The API key for the integration service. (Librato)
:param pulumi.Input[str] client_email: The client email. (Stackdriver)
:param pulumi.Input[str] email: The email address registred for the integration service. (Librato)
:param pulumi.Input[str] license_key: The license key registred for the integration service. (New Relic)
:param pulumi.Input[str] name: The name of metrics integration
:param pulumi.Input[str] private_key: The private key. (Stackdriver)
:param pulumi.Input[str] project_id: Project ID. (Stackdriver)
:param pulumi.Input[str] queue_allowlist: (optional) allowlist using regular expression
:param pulumi.Input[str] queue_whitelist: **Deprecated**
:param pulumi.Input[str] region: AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
:param pulumi.Input[str] secret_access_key: AWS secret key. (Cloudwatch)
:param pulumi.Input[str] tags: (optional) tags. E.g. env=prod,region=europe
:param pulumi.Input[str] vhost_allowlist: (optional) allowlist using regular expression
:param pulumi.Input[str] vhost_whitelist: **Deprecated**
"""
pulumi.set(__self__, "instance_id", instance_id)
if access_key_id is not None:
pulumi.set(__self__, "access_key_id", access_key_id)
if api_key is not None:
pulumi.set(__self__, "api_key", api_key)
if client_email is not None:
pulumi.set(__self__, "client_email", client_email)
if email is not None:
pulumi.set(__self__, "email", email)
if license_key is not None:
pulumi.set(__self__, "license_key", license_key)
if name is not None:
pulumi.set(__self__, "name", name)
if private_key is not None:
pulumi.set(__self__, "private_key", private_key)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if queue_allowlist is not None:
pulumi.set(__self__, "queue_allowlist", queue_allowlist)
if queue_whitelist is not None:
warnings.warn("""use queue_allowlist instead""", DeprecationWarning)
pulumi.log.warn("""queue_whitelist is deprecated: use queue_allowlist instead""")
if queue_whitelist is not None:
pulumi.set(__self__, "queue_whitelist", queue_whitelist)
if region is not None:
pulumi.set(__self__, "region", region)
if secret_access_key is not None:
pulumi.set(__self__, "secret_access_key", secret_access_key)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if vhost_allowlist is not None:
pulumi.set(__self__, "vhost_allowlist", vhost_allowlist)
if vhost_whitelist is not None:
warnings.warn("""use vhost_allowlist instead""", DeprecationWarning)
pulumi.log.warn("""vhost_whitelist is deprecated: use vhost_allowlist instead""")
if vhost_whitelist is not None:
pulumi.set(__self__, "vhost_whitelist", vhost_whitelist)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Input[int]:
"""
Instance identifier
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: pulumi.Input[int]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter(name="accessKeyId")
def access_key_id(self) -> Optional[pulumi.Input[str]]:
"""
AWS access key identifier. (Cloudwatch)
"""
return pulumi.get(self, "access_key_id")
@access_key_id.setter
def access_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access_key_id", value)
@property
@pulumi.getter(name="apiKey")
def api_key(self) -> Optional[pulumi.Input[str]]:
"""
The API key for the integration service. (Librato)
"""
return pulumi.get(self, "api_key")
@api_key.setter
def api_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_key", value)
@property
@pulumi.getter(name="clientEmail")
def client_email(self) -> Optional[pulumi.Input[str]]:
"""
The client email. (Stackdriver)
"""
return pulumi.get(self, "client_email")
@client_email.setter
def client_email(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_email", value)
@property
@pulumi.getter
def email(self) -> Optional[pulumi.Input[str]]:
"""
The email address registred for the integration service. (Librato)
"""
return pulumi.get(self, "email")
@email.setter
def email(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "email", value)
@property
@pulumi.getter(name="licenseKey")
def license_key(self) -> Optional[pulumi.Input[str]]:
"""
The license key registred for the integration service. (New Relic)
"""
return pulumi.get(self, "license_key")
@license_key.setter
def license_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "license_key", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of metrics integration
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateKey")
def private_key(self) -> Optional[pulumi.Input[str]]:
"""
The private key. (Stackdriver)
"""
return pulumi.get(self, "private_key")
@private_key.setter
def private_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_key", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
Project ID. (Stackdriver)
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter(name="queueAllowlist")
def queue_allowlist(self) -> Optional[pulumi.Input[str]]:
"""
(optional) allowlist using regular expression
"""
return pulumi.get(self, "queue_allowlist")
@queue_allowlist.setter
def queue_allowlist(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "queue_allowlist", value)
@property
@pulumi.getter(name="queueWhitelist")
def queue_whitelist(self) -> Optional[pulumi.Input[str]]:
"""
**Deprecated**
"""
return pulumi.get(self, "queue_whitelist")
@queue_whitelist.setter
def queue_whitelist(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "queue_whitelist", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="secretAccessKey")
def secret_access_key(self) -> Optional[pulumi.Input[str]]:
"""
AWS secret key. (Cloudwatch)
"""
return pulumi.get(self, "secret_access_key")
@secret_access_key.setter
def secret_access_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "secret_access_key", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[str]]:
"""
(optional) tags. E.g. env=prod,region=europe
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="vhostAllowlist")
def vhost_allowlist(self) -> Optional[pulumi.Input[str]]:
"""
(optional) allowlist using regular expression
"""
return pulumi.get(self, "vhost_allowlist")
@vhost_allowlist.setter
def vhost_allowlist(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vhost_allowlist", value)
@property
@pulumi.getter(name="vhostWhitelist")
def vhost_whitelist(self) -> Optional[pulumi.Input[str]]:
"""
**Deprecated**
"""
return pulumi.get(self, "vhost_whitelist")
@vhost_whitelist.setter
def vhost_whitelist(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vhost_whitelist", value)
@pulumi.input_type
class _IntegrationMetricState:
    """State properties of an IntegrationMetric resource, used by `IntegrationMetric.get`."""
    def __init__(__self__, *,
                 access_key_id: Optional[pulumi.Input[str]] = None,
                 api_key: Optional[pulumi.Input[str]] = None,
                 client_email: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[int]] = None,
                 license_key: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 queue_allowlist: Optional[pulumi.Input[str]] = None,
                 queue_whitelist: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 secret_access_key: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[str]] = None,
                 vhost_allowlist: Optional[pulumi.Input[str]] = None,
                 vhost_whitelist: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering IntegrationMetric resources.
        :param pulumi.Input[str] access_key_id: AWS access key identifier. (Cloudwatch)
        :param pulumi.Input[str] api_key: The API key for the integration service. (Librato)
        :param pulumi.Input[str] client_email: The client email. (Stackdriver)
        :param pulumi.Input[str] email: The email address registred for the integration service. (Librato)
        :param pulumi.Input[int] instance_id: Instance identifier
        :param pulumi.Input[str] license_key: The license key registred for the integration service. (New Relic)
        :param pulumi.Input[str] name: The name of metrics integration
        :param pulumi.Input[str] private_key: The private key. (Stackdriver)
        :param pulumi.Input[str] project_id: Project ID. (Stackdriver)
        :param pulumi.Input[str] queue_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] queue_whitelist: **Deprecated** — use `queue_allowlist` instead
        :param pulumi.Input[str] region: AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
        :param pulumi.Input[str] secret_access_key: AWS secret key. (Cloudwatch)
        :param pulumi.Input[str] tags: (optional) tags. E.g. env=prod,region=europe
        :param pulumi.Input[str] vhost_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] vhost_whitelist: **Deprecated** — use `vhost_allowlist` instead
        """
        if access_key_id is not None:
            pulumi.set(__self__, "access_key_id", access_key_id)
        if api_key is not None:
            pulumi.set(__self__, "api_key", api_key)
        if client_email is not None:
            pulumi.set(__self__, "client_email", client_email)
        if email is not None:
            pulumi.set(__self__, "email", email)
        if instance_id is not None:
            pulumi.set(__self__, "instance_id", instance_id)
        if license_key is not None:
            pulumi.set(__self__, "license_key", license_key)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if private_key is not None:
            pulumi.set(__self__, "private_key", private_key)
        if project_id is not None:
            pulumi.set(__self__, "project_id", project_id)
        if queue_allowlist is not None:
            pulumi.set(__self__, "queue_allowlist", queue_allowlist)
        if queue_whitelist is not None:
            # Deprecated alias of queue_allowlist: warn, but still honour the
            # supplied value for backwards compatibility.  (Merged from two
            # identical-condition `if` blocks in the generated code.)
            warnings.warn("""use queue_allowlist instead""", DeprecationWarning)
            pulumi.log.warn("""queue_whitelist is deprecated: use queue_allowlist instead""")
            pulumi.set(__self__, "queue_whitelist", queue_whitelist)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if secret_access_key is not None:
            pulumi.set(__self__, "secret_access_key", secret_access_key)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if vhost_allowlist is not None:
            pulumi.set(__self__, "vhost_allowlist", vhost_allowlist)
        if vhost_whitelist is not None:
            # Deprecated alias of vhost_allowlist: warn, but still honour it.
            warnings.warn("""use vhost_allowlist instead""", DeprecationWarning)
            pulumi.log.warn("""vhost_whitelist is deprecated: use vhost_allowlist instead""")
            pulumi.set(__self__, "vhost_whitelist", vhost_whitelist)
    @property
    @pulumi.getter(name="accessKeyId")
    def access_key_id(self) -> Optional[pulumi.Input[str]]:
        """
        AWS access key identifier. (Cloudwatch)
        """
        return pulumi.get(self, "access_key_id")
    @access_key_id.setter
    def access_key_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "access_key_id", value)
    @property
    @pulumi.getter(name="apiKey")
    def api_key(self) -> Optional[pulumi.Input[str]]:
        """
        The API key for the integration service. (Librato)
        """
        return pulumi.get(self, "api_key")
    @api_key.setter
    def api_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_key", value)
    @property
    @pulumi.getter(name="clientEmail")
    def client_email(self) -> Optional[pulumi.Input[str]]:
        """
        The client email. (Stackdriver)
        """
        return pulumi.get(self, "client_email")
    @client_email.setter
    def client_email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_email", value)
    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        The email address registred for the integration service. (Librato)
        """
        return pulumi.get(self, "email")
    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)
    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> Optional[pulumi.Input[int]]:
        """
        Instance identifier
        """
        return pulumi.get(self, "instance_id")
    @instance_id.setter
    def instance_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "instance_id", value)
    @property
    @pulumi.getter(name="licenseKey")
    def license_key(self) -> Optional[pulumi.Input[str]]:
        """
        The license key registred for the integration service. (New Relic)
        """
        return pulumi.get(self, "license_key")
    @license_key.setter
    def license_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "license_key", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of metrics integration
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> Optional[pulumi.Input[str]]:
        """
        The private key. (Stackdriver)
        """
        return pulumi.get(self, "private_key")
    @private_key.setter
    def private_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "private_key", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """
        Project ID. (Stackdriver)
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter(name="queueAllowlist")
    def queue_allowlist(self) -> Optional[pulumi.Input[str]]:
        """
        (optional) allowlist using regular expression
        """
        return pulumi.get(self, "queue_allowlist")
    @queue_allowlist.setter
    def queue_allowlist(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "queue_allowlist", value)
    @property
    @pulumi.getter(name="queueWhitelist")
    def queue_whitelist(self) -> Optional[pulumi.Input[str]]:
        """
        **Deprecated** — use `queue_allowlist` instead.
        """
        return pulumi.get(self, "queue_whitelist")
    @queue_whitelist.setter
    def queue_whitelist(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "queue_whitelist", value)
    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
        """
        return pulumi.get(self, "region")
    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)
    @property
    @pulumi.getter(name="secretAccessKey")
    def secret_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        AWS secret key. (Cloudwatch)
        """
        return pulumi.get(self, "secret_access_key")
    @secret_access_key.setter
    def secret_access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_access_key", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[str]]:
        """
        (optional) tags. E.g. env=prod,region=europe
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="vhostAllowlist")
    def vhost_allowlist(self) -> Optional[pulumi.Input[str]]:
        """
        (optional) allowlist using regular expression
        """
        return pulumi.get(self, "vhost_allowlist")
    @vhost_allowlist.setter
    def vhost_allowlist(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vhost_allowlist", value)
    @property
    @pulumi.getter(name="vhostWhitelist")
    def vhost_whitelist(self) -> Optional[pulumi.Input[str]]:
        """
        **Deprecated** — use `vhost_allowlist` instead.
        """
        return pulumi.get(self, "vhost_whitelist")
    @vhost_whitelist.setter
    def vhost_whitelist(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vhost_whitelist", value)
class IntegrationMetric(pulumi.CustomResource):
    # Pulumi resource wrapping a cloudamqp metrics integration (Cloudwatch,
    # Librato, Stackdriver, Data Dog, New Relic — see per-parameter docs).
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 access_key_id: Optional[pulumi.Input[str]] = None,
                 api_key: Optional[pulumi.Input[str]] = None,
                 client_email: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[int]] = None,
                 license_key: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 queue_allowlist: Optional[pulumi.Input[str]] = None,
                 queue_whitelist: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 secret_access_key: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[str]] = None,
                 vhost_allowlist: Optional[pulumi.Input[str]] = None,
                 vhost_whitelist: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Create a IntegrationMetric resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] access_key_id: AWS access key identifier. (Cloudwatch)
        :param pulumi.Input[str] api_key: The API key for the integration service. (Librato)
        :param pulumi.Input[str] client_email: The client email. (Stackdriver)
        :param pulumi.Input[str] email: The email address registred for the integration service. (Librato)
        :param pulumi.Input[int] instance_id: Instance identifier
        :param pulumi.Input[str] license_key: The license key registred for the integration service. (New Relic)
        :param pulumi.Input[str] name: The name of metrics integration
        :param pulumi.Input[str] private_key: The private key. (Stackdriver)
        :param pulumi.Input[str] project_id: Project ID. (Stackdriver)
        :param pulumi.Input[str] queue_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] queue_whitelist: **Deprecated**
        :param pulumi.Input[str] region: AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
        :param pulumi.Input[str] secret_access_key: AWS secret key. (Cloudwatch)
        :param pulumi.Input[str] tags: (optional) tags. E.g. env=prod,region=europe
        :param pulumi.Input[str] vhost_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] vhost_whitelist: **Deprecated**
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: IntegrationMetricArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a IntegrationMetric resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param IntegrationMetricArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two typed overloads above: when the caller
        # passed an IntegrationMetricArgs bundle, unpack it into keyword args;
        # otherwise forward the raw positional/keyword args unchanged.
        resource_args, opts = _utilities.get_resource_args_opts(IntegrationMetricArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 access_key_id: Optional[pulumi.Input[str]] = None,
                 api_key: Optional[pulumi.Input[str]] = None,
                 client_email: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[int]] = None,
                 license_key: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 queue_allowlist: Optional[pulumi.Input[str]] = None,
                 queue_whitelist: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 secret_access_key: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[str]] = None,
                 vhost_allowlist: Optional[pulumi.Input[str]] = None,
                 vhost_whitelist: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.  Builds the
        # property bag and registers the resource with the pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not adopting an existing one by id).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = IntegrationMetricArgs.__new__(IntegrationMetricArgs)
            __props__.__dict__["access_key_id"] = access_key_id
            __props__.__dict__["api_key"] = api_key
            __props__.__dict__["client_email"] = client_email
            __props__.__dict__["email"] = email
            # instance_id is the only required property; enforced here unless
            # the resource is being looked up by URN.
            if instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'instance_id'")
            __props__.__dict__["instance_id"] = instance_id
            __props__.__dict__["license_key"] = license_key
            __props__.__dict__["name"] = name
            __props__.__dict__["private_key"] = private_key
            __props__.__dict__["project_id"] = project_id
            __props__.__dict__["queue_allowlist"] = queue_allowlist
            # Deprecated alias of queue_allowlist — warn but still forward it.
            if queue_whitelist is not None and not opts.urn:
                warnings.warn("""use queue_allowlist instead""", DeprecationWarning)
                pulumi.log.warn("""queue_whitelist is deprecated: use queue_allowlist instead""")
            __props__.__dict__["queue_whitelist"] = queue_whitelist
            __props__.__dict__["region"] = region
            __props__.__dict__["secret_access_key"] = secret_access_key
            __props__.__dict__["tags"] = tags
            __props__.__dict__["vhost_allowlist"] = vhost_allowlist
            # Deprecated alias of vhost_allowlist — warn but still forward it.
            if vhost_whitelist is not None and not opts.urn:
                warnings.warn("""use vhost_allowlist instead""", DeprecationWarning)
                pulumi.log.warn("""vhost_whitelist is deprecated: use vhost_allowlist instead""")
            __props__.__dict__["vhost_whitelist"] = vhost_whitelist
        super(IntegrationMetric, __self__).__init__(
            'cloudamqp:index/integrationMetric:IntegrationMetric',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            access_key_id: Optional[pulumi.Input[str]] = None,
            api_key: Optional[pulumi.Input[str]] = None,
            client_email: Optional[pulumi.Input[str]] = None,
            email: Optional[pulumi.Input[str]] = None,
            instance_id: Optional[pulumi.Input[int]] = None,
            license_key: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            private_key: Optional[pulumi.Input[str]] = None,
            project_id: Optional[pulumi.Input[str]] = None,
            queue_allowlist: Optional[pulumi.Input[str]] = None,
            queue_whitelist: Optional[pulumi.Input[str]] = None,
            region: Optional[pulumi.Input[str]] = None,
            secret_access_key: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[str]] = None,
            vhost_allowlist: Optional[pulumi.Input[str]] = None,
            vhost_whitelist: Optional[pulumi.Input[str]] = None) -> 'IntegrationMetric':
        """
        Get an existing IntegrationMetric resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] access_key_id: AWS access key identifier. (Cloudwatch)
        :param pulumi.Input[str] api_key: The API key for the integration service. (Librato)
        :param pulumi.Input[str] client_email: The client email. (Stackdriver)
        :param pulumi.Input[str] email: The email address registred for the integration service. (Librato)
        :param pulumi.Input[int] instance_id: Instance identifier
        :param pulumi.Input[str] license_key: The license key registred for the integration service. (New Relic)
        :param pulumi.Input[str] name: The name of metrics integration
        :param pulumi.Input[str] private_key: The private key. (Stackdriver)
        :param pulumi.Input[str] project_id: Project ID. (Stackdriver)
        :param pulumi.Input[str] queue_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] queue_whitelist: **Deprecated**
        :param pulumi.Input[str] region: AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
        :param pulumi.Input[str] secret_access_key: AWS secret key. (Cloudwatch)
        :param pulumi.Input[str] tags: (optional) tags. E.g. env=prod,region=europe
        :param pulumi.Input[str] vhost_allowlist: (optional) allowlist using regular expression
        :param pulumi.Input[str] vhost_whitelist: **Deprecated**
        """
        # Adopt an existing resource by id: build a state bag and hand it to
        # the constructor via __props__ together with opts.id.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _IntegrationMetricState.__new__(_IntegrationMetricState)
        __props__.__dict__["access_key_id"] = access_key_id
        __props__.__dict__["api_key"] = api_key
        __props__.__dict__["client_email"] = client_email
        __props__.__dict__["email"] = email
        __props__.__dict__["instance_id"] = instance_id
        __props__.__dict__["license_key"] = license_key
        __props__.__dict__["name"] = name
        __props__.__dict__["private_key"] = private_key
        __props__.__dict__["project_id"] = project_id
        __props__.__dict__["queue_allowlist"] = queue_allowlist
        __props__.__dict__["queue_whitelist"] = queue_whitelist
        __props__.__dict__["region"] = region
        __props__.__dict__["secret_access_key"] = secret_access_key
        __props__.__dict__["tags"] = tags
        __props__.__dict__["vhost_allowlist"] = vhost_allowlist
        __props__.__dict__["vhost_whitelist"] = vhost_whitelist
        return IntegrationMetric(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="accessKeyId")
    def access_key_id(self) -> pulumi.Output[Optional[str]]:
        """
        AWS access key identifier. (Cloudwatch)
        """
        return pulumi.get(self, "access_key_id")
    @property
    @pulumi.getter(name="apiKey")
    def api_key(self) -> pulumi.Output[Optional[str]]:
        """
        The API key for the integration service. (Librato)
        """
        return pulumi.get(self, "api_key")
    @property
    @pulumi.getter(name="clientEmail")
    def client_email(self) -> pulumi.Output[Optional[str]]:
        """
        The client email. (Stackdriver)
        """
        return pulumi.get(self, "client_email")
    @property
    @pulumi.getter
    def email(self) -> pulumi.Output[Optional[str]]:
        """
        The email address registred for the integration service. (Librato)
        """
        return pulumi.get(self, "email")
    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Output[int]:
        """
        Instance identifier
        """
        return pulumi.get(self, "instance_id")
    @property
    @pulumi.getter(name="licenseKey")
    def license_key(self) -> pulumi.Output[Optional[str]]:
        """
        The license key registred for the integration service. (New Relic)
        """
        return pulumi.get(self, "license_key")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of metrics integration
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> pulumi.Output[Optional[str]]:
        """
        The private key. (Stackdriver)
        """
        return pulumi.get(self, "private_key")
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Output[Optional[str]]:
        """
        Project ID. (Stackdriver)
        """
        return pulumi.get(self, "project_id")
    @property
    @pulumi.getter(name="queueAllowlist")
    def queue_allowlist(self) -> pulumi.Output[Optional[str]]:
        """
        (optional) allowlist using regular expression
        """
        return pulumi.get(self, "queue_allowlist")
    @property
    @pulumi.getter(name="queueWhitelist")
    def queue_whitelist(self) -> pulumi.Output[Optional[str]]:
        """
        **Deprecated** — use `queue_allowlist` instead.
        """
        return pulumi.get(self, "queue_whitelist")
    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[Optional[str]]:
        """
        AWS region for Cloudwatch and [US/EU] for Data dog/New relic. (Cloudwatch, Data Dog, New Relic)
        """
        return pulumi.get(self, "region")
    @property
    @pulumi.getter(name="secretAccessKey")
    def secret_access_key(self) -> pulumi.Output[Optional[str]]:
        """
        AWS secret key. (Cloudwatch)
        """
        return pulumi.get(self, "secret_access_key")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[str]]:
        """
        (optional) tags. E.g. env=prod,region=europe
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="vhostAllowlist")
    def vhost_allowlist(self) -> pulumi.Output[Optional[str]]:
        """
        (optional) allowlist using regular expression
        """
        return pulumi.get(self, "vhost_allowlist")
    @property
    @pulumi.getter(name="vhostWhitelist")
    def vhost_whitelist(self) -> pulumi.Output[Optional[str]]:
        """
        **Deprecated** — use `vhost_allowlist` instead.
        """
        return pulumi.get(self, "vhost_whitelist")
| 41.571264
| 136
| 0.633174
| 4,165
| 36,167
| 5.260984
| 0.043457
| 0.106426
| 0.125867
| 0.135542
| 0.91872
| 0.911236
| 0.896541
| 0.887459
| 0.880842
| 0.857658
| 0
| 0.000037
| 0.250643
| 36,167
| 869
| 137
| 41.619102
| 0.808464
| 0.226201
| 0
| 0.877778
| 1
| 0
| 0.105619
| 0.00275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161111
| false
| 0.001852
| 0.009259
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da2782da7cee09a35a9a3e04dde4c97cb8b7e652
| 2,408
|
py
|
Python
|
parallel_image_generator.py
|
sumu007/Image-Processing
|
72d372aafa33c2772a6577dd17eee12b1a7aa3f7
|
[
"Apache-2.0"
] | 73
|
2019-02-15T16:26:20.000Z
|
2022-03-06T17:35:37.000Z
|
parallel_image_generator.py
|
Gowtham1729/Image-Processing-Toolbox
|
3af60e241c95aa54ca7c81ea4b12c72a9eaa59a9
|
[
"Apache-2.0"
] | null | null | null |
parallel_image_generator.py
|
Gowtham1729/Image-Processing-Toolbox
|
3af60e241c95aa54ca7c81ea4b12c72a9eaa59a9
|
[
"Apache-2.0"
] | 19
|
2019-09-26T19:17:39.000Z
|
2022-03-24T21:47:39.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 9 11:14:26 2018

@author: Gowtham

Shift an image by one pixel along each of the eight compass directions and
write each result to its own BMP file.  Vacated rows/columns are left as
zeros (black), matching the original element-by-element copy loops.
"""
import numpy as np


def _axis_slices(d):
    """Destination and source slices along one axis for an offset of d pixels."""
    if d > 0:
        return slice(d, None), slice(None, -d)
    if d < 0:
        return slice(None, d), slice(-d, None)
    return slice(None), slice(None)


def shift(img, di, dj):
    """Return a zero-padded copy of ``img`` with pixel (i, j) moved to (i+di, j+dj).

    Vectorized replacement for the original triple ``for`` loops: one slice
    assignment instead of an O(rows*cols*channels) Python loop.  Output dtype
    is float64 (``np.zeros`` default), exactly as in the original buffers.
    """
    out = np.zeros(img.shape)
    dst_i, src_i = _axis_slices(di)
    dst_j, src_j = _axis_slices(dj)
    out[dst_i, dst_j] = img[src_i, src_j]
    return out


# (file name, row offset, column offset) — offsets reproduce the original
# index arithmetic exactly, e.g. 'down.bmp' was a[i-1][j][k] = img[i][j][k].
# NOTE(review): the original names look inverted relative to the math
# ('down.bmp' moves content toward row 0, i.e. visually up); the names are
# kept byte-for-byte so downstream consumers see the same files.
SHIFTS = [
    ('down.bmp', -1, 0),
    ('up.bmp', 1, 0),
    ('left.bmp', 0, -1),
    ('right.bmp', 0, 1),
    ('leftdown.bmp', -1, -1),
    ('rightup.bmp', 1, 1),
    ('leftup.bmp', 1, -1),
    ('rightdown.bmp', -1, 1),
]


def main():
    # cv2 imported locally so the pure-numpy helpers above remain importable
    # (and testable) without OpenCV installed.
    import cv2
    img = cv2.imread("gray.bmp")
    for filename, di, dj in SHIFTS:
        cv2.imwrite(filename, shift(img, di, dj))


if __name__ == "__main__":
    main()
| 24.08
| 42
| 0.488787
| 450
| 2,408
| 2.615556
| 0.113333
| 0.108751
| 0.101954
| 0.198811
| 0.809686
| 0.798641
| 0.798641
| 0.798641
| 0.798641
| 0.798641
| 0
| 0.035336
| 0.294851
| 2,408
| 100
| 43
| 24.08
| 0.657833
| 0.187292
| 0
| 0.627451
| 0
| 0
| 0.046423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039216
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da37380e4a794f40ed556f522a640a9c71805448
| 1,562
|
py
|
Python
|
tests/test_markup.py
|
sahemmen/anchorman
|
69e027089ae3ff4be364776f3b76a3e5471015d1
|
[
"Apache-2.0"
] | 9
|
2015-05-26T21:53:48.000Z
|
2020-03-07T19:50:40.000Z
|
tests/test_markup.py
|
sahemmen/anchorman
|
69e027089ae3ff4be364776f3b76a3e5471015d1
|
[
"Apache-2.0"
] | 2
|
2017-07-20T12:54:18.000Z
|
2021-03-25T21:43:37.000Z
|
tests/test_markup.py
|
sahemmen/anchorman
|
69e027089ae3ff4be364776f3b76a3e5471015d1
|
[
"Apache-2.0"
] | 4
|
2015-05-05T12:08:45.000Z
|
2020-03-07T19:50:29.000Z
|
# -*- coding: utf-8 -*-
from anchorman import annotate, clean, get_config
from tests.utils import fix_bs4_parsing_spaces
from tests.data.dummy import LINKS
def test_remove_decoration():
annotated = """<div><p id="1">lala <span type="letterA"><a class="anchorman">A</a></span> la lala <span type="letterA"><a class="anchorman">AA</a></span> <span type="letterB"><a class="anchorman">BB</a></span> <span type="letterB"><a class="anchorman">B</a></span> la <span type="letterC"><a class="anchorman">C</a></span> lalala <span type="letterD">DDD</span> <span type="letterD">D</span> <span type="letterE">E</span></p> <p id="2">la <span type="letterE">E</span> <span type="letterE">EE</span> <span type="letterA">AA</span> lal <span type="letterC">CC</span> <span type="letterC">C</span> la la <span type="letterB">BB</span> la <span type="letterD">DD</span> <span type="letterD">D</span> lala <span type="letterE">EE</span> la</p> <p id="3"><span type="letterB">B</span> la <span type="letterB">BB</span> <span type="letterE">EEE</span> <span type="letterA">A</span> la <span type="letterC">CCC</span> <span type="letterB">B</span> la <span type="letterD">DDD</span> <span type="letterC">C</span> lala <span type="letterA">AAA</span> <span type="letterD">D</span> la <span type="letterB">BBB</span> <span type="letterE">E</span></p></div>"""
cleaned = """<div><p id="1">lala A la lala AA BB B la C lalala DDD D E</p><p id="2">la E EE AA lal CC C la la BB la DD D lala EE la</p><p id="3">B la BB EEE A la CCC B la DDD C lala AAA D la BBB E</p></div>"""
| 104.133333
| 1,154
| 0.660051
| 286
| 1,562
| 3.583916
| 0.202797
| 0.226341
| 0.163902
| 0.081951
| 0.598049
| 0.460488
| 0.292683
| 0.126829
| 0
| 0
| 0
| 0.005747
| 0.108835
| 1,562
| 14
| 1,155
| 111.571429
| 0.730603
| 0.013444
| 0
| 0
| 0
| 0.333333
| 0.86263
| 0.479818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
da6113a188c903023a7d37426db0bf52105b08f5
| 14,868
|
py
|
Python
|
bibframe/ingesters/tutt_maps.py
|
jermnelson/aristotle-library-apps
|
f742847cd20c5b5c3b46dd53dfc395a2e1caa240
|
[
"Apache-2.0"
] | 2
|
2015-03-30T16:36:51.000Z
|
2016-06-15T01:39:47.000Z
|
bibframe/ingesters/tutt_maps.py
|
jermnelson/aristotle-library-apps
|
f742847cd20c5b5c3b46dd53dfc395a2e1caa240
|
[
"Apache-2.0"
] | 2
|
2021-06-10T17:43:54.000Z
|
2021-12-13T19:40:08.000Z
|
bibframe/ingesters/tutt_maps.py
|
jermnelson/aristotle-library-apps
|
f742847cd20c5b5c3b46dd53dfc395a2e1caa240
|
[
"Apache-2.0"
] | 1
|
2015-11-08T00:40:11.000Z
|
2015-11-08T00:40:11.000Z
|
#
# Tutt library III location codes
#
# Maps a III (Innovative Interfaces) location code to its human-readable
# library location name.  Duplicate keys 'xmr' and 'xfb' from the original
# literal (each repeated with an identical value, so the later entry silently
# overwrote the earlier one) have been removed.
# NOTE(review): 'Online Periodcals' looks like a typo for 'Periodicals' but is
# kept byte-for-byte in case other code matches on the display string; some
# values carry a trailing space for the same reason.
FULL_CODE_MAP = {
    'dacc': 'Digital Archives of Colorado College',
    'ewww': 'Online',
    'ewwwd': 'Online Government Documents',
    'ewwwp': 'Online Periodcals',
    'ewwwn': 'Online',
    'tarf': 'Tutt Reference',
    'tarfa': 'Tutt 1st Floor South',
    'tarfc': 'Tutt Reference',
    'tarfd': 'Tutt Reference Desk',
    'tarfg': 'Tutt Reference Desk',
    'tarfi': 'Tutt Reference',
    'tarfm': 'Tutt Reference North 2nd Floor',
    'tarfo': 'Tutt Reference North 2nd Floor',
    'tban': 'Art Reading Room Tutt 2nd Floor South',
    'tbanf': 'Art Reading Room Tutt 2nd Floor South',
    'tb': 'Tutt 3rd Floor',
    'tbnc': 'Tutt 3rd Floor',
    'tbndp': 'Tutt Display',
    'tbnew': 'Tutt New Arrivals',
    'tbp': 'Tutt North Basement',
    'tbpnc': 'Tutt North Basement',
    'tcat': 'Tutt Cataloging Office',
    'tcas': 'CDROM/Cassette Tutt Circulation Desk',
    'tcbs': 'Tutt Leisure Reading',
    'tcurr': 'Tutt 3rd Floor',
    'tdacd': 'Government Documents CD-ROM index',
    'tfly': 'On The Fly',
    'tgr': 'Tutt North Basement',
    'tor': 'Technical Services',
    'tr': 'On Order',
    'tre': 'Electronic Reserves',
    'trp': 'Reserves Tutt 1st Floor',
    'trrm': 'Reserves Tutt 1st Floor',
    'trs': 'Reserves Tutt 1st Floor',
    'trsdo': 'Reserves Tutt 1st Floor',
    'trstv': 'Reserves Tutt 1st Floor',
    'td': 'Tutt South Basement',
    'tdo': 'Tutt South Basement',
    'tdcol': 'Tutt South Basement',
    'tde': 'Tutt South Basement',
    'tdea': 'Tutt South Basement',
    'tdem': 'Tutt South Basement',
    'tdemc': 'Tutt South Basement',
    'tdn': 'Tutt South Basement',
    'tdi': 'Tutt 1st Floor South',
    'tdm': 'Tutt 1st Floor South',
    'tdmt': 'Tutt 1st Floor South',
    'tdmf': 'Tutt 1st Floor South',
    'tdmi': 'Microforms Tutt 2nd Floor',
    'tdof': 'Tutt 1st Floor South',
    'tdscs': 'Tutt 1st Floor South',
    'tmaps': 'Tutt North 2nd Floor Map File',
    'tmi': 'Microforms Tutt 2nd Floor',
    'tmic': 'Microforms Tutt 2nd Floor',
    'tmico': 'Microforms Tutt 2nd Floor',
    'tmics': 'Microforms Tutt 2nd Floor',
    'tmifs': 'Microforms Tutt 2nd Floor',
    'tt': 'Tutt North Basement Theses',
    'ttlc': 'Teaching Learning Center',
    'ttla': 'Tutt North Basement Lit Award',
    'xfb': 'Fine Arts Center',
    'xm': 'Music Library Books and Scores',
    'xmcas': 'Music Library Cassette',
    'xmcat': 'Music Library Catalog Office',
    'xmcd': 'Music Library CD-ROM',
    'xmcir': 'Music Library Circ Desk',
    'xmdvd': 'Music Library DVD',
    'xmedd': 'Music Library Education DVD',
    'xmh': 'Music Library -NC Books and Scores',
    'xmhs': 'Music Library -NC Storage',
    'xmhsm': 'Music Library -NC Music Mini Scores',
    'xmins': 'Music Lib. -Instrument Storage',
    'xmld': 'Music Library',
    'xmlp1': 'Music Library LP/SLP Room',
    'xmmcd': 'Music Library Music CD',
    'xmmi': 'Music Library Microforms',
    'xmo': 'Music Library Oversize',
    'xmr': 'Music Library Reference',
    'xmper': 'Music Library Periodicals',
    'xms': 'Music Library Storage',
    'xmrt': 'Music Library Reel Tape',
    'xmrs': 'Music Library Reserves',
    'xmscm': 'Music Library MiniScores',
    'xmv': 'Music Library Video',
    'tper': 'Periodicals Tutt 2nd Floor',
    'tpero': 'Periodicals Tutt 2nd Floor',
    'tdmo': 'Oversize Tutt 2nd Floor',
    'tf': 'Folio Tutt 3rd Floor',
    'to': 'Oversize Tutt 3rd Floor',
    'tsa': 'Special Collections Audio',
    'tlr': 'Special Collections Lincoln Room',
    'tlrc': 'Special Collections Lincoln Room Small',
    'tlrf': 'Special Collections Lincoln Room Folio',
    'tlro': 'Special Collections Lincoln Room Oversize',
    'tlrp': 'Special Collections Lincoln Room Periodical',
    'tlrpa': 'Special Collections Lincoln Room Pamphlet',
    'tscc': 'Special Collections CC Room',
    'tsccf': 'Special Collections CC Room Folio',
    'tscco': 'Special Collections CC Room Oversize',
    'tsccp': 'Special Collections CC Room Periodical ',
    'tsnv': 'Special Collections CC Room Pamphlet ',
    'tsco': 'Special Collections Colorado Room',
    'tscof': 'Special Collections Colorado Room Folio',
    'tscoo': 'Special Collections Colorado Room Oversize',
    'tscop': 'Special Collections Colorado Room Periodical',
    'tsm': 'Special Collections Maps',
    'tsmi': 'Special Collections Microform',
    'tsmf': 'Special Collections Manuscripts File',
    'tsms': 'Special Collections Manuscripts',
    'tsof': 'Special Collections Offices',
    'tsra': 'Special Collections Rare',
    'tsraf': 'Special Collections Rare Folio',
    'tsrao': 'Special Collections Rare Oversize',
    'tsrat': 'Special Collections Rare Small',
    'tsse': 'Special Collections Special Editions',
    'tssef': 'Special Collections Special Editions Folio',
    'tsseo': 'Special Collections Special Editions Oversize',
    'tsset': 'Special Collections Special Editions Small',
    'tssto': 'Special Collections Storage Basement',
    'tsv': 'Special Collections Videos',
    'tv': 'Videos-Tutt 2nd Floor North',
    'tvc': 'Video-Tutt Circulation Desk',
    'tvdvd': 'DVD-Tutt Circulation Desk',
    'xbaca': 'Baca Campus',
    'xfcab': 'Fine Arts Center',
    'xfdsk': 'Fine Arts Center',
    'xfdvd': 'Fine Arts Center',
    'xffil': 'Fine Arts Center',
    'xfup': 'Fine Arts Center',
    'xfo': 'Fine Arts Center',
    'xfv': 'Fine Arts Center',
    'xsan': 'Anthropology Seminar-Barnes',
    'xsby': 'Biology Seminar-Olin',
    'xsbyo': 'Biology Seminar-Olin',
    'xsch': 'Barnes Chemistry Library',
    'xscho': 'Barnes Chemistry Library',
    'xsed': 'Education Dept. (Mierow)',
    'xsedc': 'Education Dept. (Mierow)',
    'xsedl': 'Education Dept. Computer Lab',
    'xsedp': 'Education Dept. (Mierow)',
    'xserc': 'Environmental Science Seminar',
    'xsetv': 'Environmental Science Seminar',
    'xsgeo': 'Geology Map Room',
    'xsgix': 'Keck GIS Commons',
    'xsgm': 'Geology Map Room',
    'xsmat': 'Math Seminar-Palmer',
    'xsph': 'Physics Seminar-Barnes',
    'xspho': 'Physics Seminar-Barnes',
    'xsps': 'Political Science Seminar-Palmer',
    'xsrus': 'Russian Seminar-Armstrong',
    'xwebb': 'Penrose Hospital',
    }
# Maps library location codes to human-readable shelving locations.
# Fix: the original literal repeated the keys 'xfb' and 'xmr' (with
# identical values); duplicate dict-literal keys are silently collapsed
# at runtime, so the extra entries were dead text and have been removed.
LOCATION_CODE_MAP = {
    'ewww':'Online',
    'ewwwd':'Online',
    'ewwwp':'Online',
    'ewwwn':'Online',
    'tarf':'Tutt Reference',
    'tarfa':'Tutt 1st Floor South',
    'tarfc':'Tutt Reference',
    'tarfd':'Tutt Reference Desk',
    'tarfg':'Tutt Reference Desk',
    'tarfi':'Tutt Reference',
    'tarfm':'Tutt Reference North 2nd Floor',
    'tarfo':'Tutt Reference North 2nd Floor',
    'tban':'Art Reading Room Tutt 2nd Floor South',
    'tbanf':'Art Reading Room Tutt 2nd Floor South',
    'tb':'Tutt 3rd Floor',
    'tbnc':'Tutt 3rd Floor',
    'tbndp':'Tutt Display',
    'tbnew':'Tutt New Arrivals',
    'tbp':'Tutt North Basement',
    'tbpnc':'Tutt North Basement',
    'tcat':'Tutt Cataloging Office',
    'tcas':'CDROM/Cassette Tutt Circulation Desk',
    'tcbs':'Tutt Leisure Reading',
    'tcurr':'Tutt 3rd Floor',
    'tdacd':'Government Documents CD-ROM index',
    'tfly':'On The Fly',
    'tgr':'Tutt North Basement',
    'tor':'Technical Services',
    'tr':'On Order',
    'tre':'Electronic Reserves',
    'trp':'Reserves Tutt 1st Floor',
    'trrm':'Reserves Tutt 1st Floor',
    'trs':'Reserves Tutt 1st Floor',
    'trsdo':'Reserves Tutt 1st Floor',
    'trstv':'Reserves Tutt 1st Floor',
    'td':'Tutt South Basement',
    'tdo':'Tutt South Basement',
    'tdcol':'Tutt South Basement',
    'tde':'Tutt South Basement',
    'tdea':'Tutt South Basement',
    'tdem':'Tutt South Basement',
    'tdemc':'Tutt South Basement',
    'tdn':'Tutt South Basement',
    'tdi':'Tutt 1st Floor South',
    'tdm':'Tutt 1st Floor South',
    'tdmt':'Tutt 1st Floor South',
    'tdmf':'Tutt 1st Floor South',
    'tdmi':'Microforms Tutt 2nd Floor',
    'tdof':'Tutt 1st Floor South',
    'tdscs':'Tutt 1st Floor South',
    'tmaps':'Tutt North 2nd Floor Map File',
    'tmi':'Microforms Tutt 2nd Floor',
    'tmic':'Microforms Tutt 2nd Floor',
    'tmico':'Microforms Tutt 2nd Floor',
    'tmics':'Microforms Tutt 2nd Floor',
    'tmifs':'Microforms Tutt 2nd Floor',
    'tt':'Tutt North Basement Theses',
    'ttlc':'Teaching Learning Center',
    'ttla':'Tutt North Basement Lit Award',
    'xfb':'Fine Arts Center',
    'xm':'Music Library Books and Scores',
    'xmcas':'Music Library Cassette',
    'xmcat':'Music Library Catalog Office',
    'xmcd':'Music Library CD-ROM',
    'xmcir':'Music Library Circ Desk',
    'xmdvd':'Music Library DVD',
    'xmedd':'Music Library Education DVD',
    'xmh':'Music Library -NC Books and Scores',
    'xmhs':'Music Library -NC Storage',
    'xmhsm':'Music Library -NC Music Mini Scores',
    'xmins':'Music Lib. -Instrument Storage',
    'xmld':'Music Library',
    'xmlp1':'Music Library LP/SLP Room',
    'xmmcd':'Music Library Music CD',
    'xmmi':'Music Library Microforms',
    'xmo':'Music Library Oversize',
    'xmr':'Music Library Reference',
    'xmper':'Music Library Periodicals',
    'xms':'Music Library Storage',
    'xmrt':'Music Library Reel Tape',
    'xmrs':'Music Library Reserves',
    'xmscm':'Music Library MiniScores',
    'xmv':'Music Library Video',
    'tper':'Periodicals Tutt 2nd Floor',
    'tpero':'Periodicals Tutt 2nd Floor',
    'tdmo':'Oversize Tutt 2nd Floor',
    'tf':'Folio Tutt 3rd Floor',
    'to':'Oversize Tutt 3rd Floor',
    'tsa':'Special Collections Audio',
    'tlr':'Special Collections Lincoln Room',
    'tlrc':'Special Collections Lincoln Room',
    'tlrf':'Special Collections Lincoln Room',
    'tlro':'Special Collections Lincoln Room',
    'tlrp':'Special Collections Lincoln Room',
    'tlrpa':'Special Collections Lincoln Room',
    'tscc':'Special Collections CC Room',
    'tsccf':'Special Collections CC Room',
    'tscco':'Special Collections CC Room',
    'tsccp':'Special Collections CC Room',
    'tsnv':'Special Collections CC Room',
    'tsco':'Special Collections Colorado Room',
    'tscof':'Special Collections Colorado Room',
    'tscoo':'Special Collections Colorado Room',
    'tscop':'Special Collections Colorado Room',
    'tsm':'Special Collections Maps',
    'tsmi':'Special Collections Microform',
    'tsmf':'Special Collections Manuscripts',
    'tsms':'Special Collections Manuscripts',
    'tsof':'Special Collections Offices',
    'tsra':'Special Collections Rare',
    'tsraf':'Special Collections Rare',
    'tsrao':'Special Collections Rare',
    'tsrat':'Special Collections Rare',
    'tsse':'Special Collections Special Editions',
    'tssef':'Special Collections Special Editions',
    'tsseo':'Special Collections Special Editions',
    'tsset':'Special Collections Special Editions',
    'tssto':'Special Collections Storage Basement',
    'tsv':'Special Collections Videos',
    'tv':'Videos-Tutt 2nd Floor North',
    'tvc':'Video-Tutt Circulation Desk',
    'tvdvd':'DVD-Tutt Circulation Desk',
    'xbaca':'Baca Campus',
    'xfcab':'Fine Arts Center',
    'xfdsk':'Fine Arts Center',
    'xfdvd':'Fine Arts Center',
    'xffil':'Fine Arts Center',
    'xfup':'Fine Arts Center',
    'xfo':'Fine Arts Center',
    'xfv':'Fine Arts Center',
    'xsan':'Anthropology Seminar-Barnes',
    'xsby':'Biology Seminar-Olin',
    'xsbyo':'Biology Seminar-Olin',
    'xsch':'Barnes Chemistry Library',
    'xscho':'Barnes Chemistry Library',
    'xsed':'Education Dept. (Mierow)',
    'xsedc':'Education Dept. (Mierow)',
    'xsedl':'Education Dept. Computer Lab',
    'xsedp':'Education Dept. (Mierow)',
    'xserc':'Environmental Science Seminar',
    'xsetv':'Environmental Science Seminar',
    'xsgeo':'Geology Map Room',
    'xsgix':'Keck GIS Commons',
    'xsgm':'Geology Map Room',
    'xsmat':'Math Seminar-Palmer',
    'xsph':'Physics Seminar-Barnes',
    'xspho':'Physics Seminar-Barnes',
    'xsps':'Political Science Seminar-Palmer',
    'xsrus':'Russian Seminar-Armstrong',
    'xwebb':'Penrose Hospital',
}
# Government-documents location codes mapped to human-readable shelving
# locations (a subset of LOCATION_CODE_MAP; insertion order preserved).
GOVDOCS_COLLECTIONS = {
    "tdacd": "Government Documents CD-ROM index",
    "td": "Tutt South Basement",
    "tdi": "Tutt 1st Floor South",
    "tdm": "Tutt 1st Floor South",
    "tdmf": "Tutt 1st Floor South",
    "tdmi": "Microforms Tutt 2nd Floor",
    "tdmo": "Oversize Tutt 2nd Floor",
    "tdmt": "Tutt 1st Floor South",
    "tdn": "Tutt South Basement",
    "tdof": "Tutt 1st Floor South",
    "tdo": "Tutt South Basement",
    "tdcol": "Tutt South Basement",
    "tde": "Tutt South Basement",
    "tdea": "Tutt South Basement",
    "ewwwd": "Online",
    "tdem": "Tutt South Basement",
    "tdemc": "Tutt South Basement",
}
# Special-collections location codes mapped to human-readable room names
# (a subset of LOCATION_CODE_MAP; insertion order preserved).
SPECIAL_COLLECTIONS = {
    "tsa": "Special Collections Audio",
    "tlr": "Special Collections Lincoln Room",
    "tlrc": "Special Collections Lincoln Room",
    "tlrf": "Special Collections Lincoln Room",
    "tlro": "Special Collections Lincoln Room",
    "tlrp": "Special Collections Lincoln Room",
    "tlrpa": "Special Collections Lincoln Room",
    "tscc": "Special Collections CC Room",
    "tsccf": "Special Collections CC Room",
    "tscco": "Special Collections CC Room",
    "tsccp": "Special Collections CC Room",
    "tsnv": "Special Collections CC Room",
    "tsco": "Special Collections Colorado Room",
    "tscof": "Special Collections Colorado Room",
    "tscoo": "Special Collections Colorado Room",
    "tscop": "Special Collections Colorado Room",
    "tsm": "Special Collections Maps",
    "tsmi": "Special Collections Microform",
    "tsmf": "Special Collections Manuscripts",
    "tsms": "Special Collections Manuscripts",
    "tsof": "Special Collections Offices",
    "tsra": "Special Collections Rare",
    "tsraf": "Special Collections Rare",
    "tsrao": "Special Collections Rare",
    "tsrat": "Special Collections Rare",
    "tsse": "Special Collections Special Editions",
    "tssef": "Special Collections Special Editions",
    "tsseo": "Special Collections Special Editions",
    "tsset": "Special Collections Special Editions",
    "tssto": "Special Collections Storage Basement",
    "tsv": "Special Collections Videos",
}
| 40.292683
| 70
| 0.615079
| 1,620
| 14,868
| 5.641358
| 0.169136
| 0.185141
| 0.038079
| 0.035343
| 0.952402
| 0.907867
| 0.904147
| 0.904147
| 0.896816
| 0.896816
| 0
| 0.006439
| 0.23749
| 14,868
| 368
| 71
| 40.402174
| 0.799682
| 0.002085
| 0
| 0.913165
| 0
| 0
| 0.673118
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da7b0792345cc3595ae95e255d7389752ef942d7
| 9,795
|
py
|
Python
|
tests/collections/common/test_spc_tokenizer.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | 4,145
|
2019-09-13T08:29:43.000Z
|
2022-03-31T18:31:44.000Z
|
tests/collections/common/test_spc_tokenizer.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | 2,031
|
2019-09-17T16:51:39.000Z
|
2022-03-31T23:52:41.000Z
|
tests/collections/common/test_spc_tokenizer.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | 1,041
|
2019-09-13T10:08:21.000Z
|
2022-03-30T06:37:38.000Z
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
from nemo.collections.common.tokenizers.youtokentome_tokenizer import YouTokenToMeTokenizer
# BERT-style special tokens used by the legacy-tokenizer tests.
# [CLS] doubles as bos/cls and [SEP] doubles as eos/sep, so the seven
# names map to only five distinct surface forms.
MODEL_SPECIAL_TOKENS = {
    "unk_token": "[UNK]",
    "sep_token": "[SEP]",
    "pad_token": "[PAD]",
    "bos_token": "[CLS]",
    "mask_token": "[MASK]",
    "eos_token": "[SEP]",
    "cls_token": "[CLS]",
}
class TestSentencePieceTokenizerLegacy:
    """Tests for SentencePieceTokenizer in legacy mode, where special
    tokens are registered manually on top of the base vocabulary."""

    model_name = "/m_common.model"

    # Sample sentence containing every special token the tests inspect.
    _TEXT = "[CLS] a b c [MASK] e f [SEP] g h i [SEP]"

    def _tokenizer(self, test_data_dir, with_specials=True):
        """Build a legacy tokenizer; optionally register MODEL_SPECIAL_TOKENS."""
        tok = SentencePieceTokenizer(test_data_dir + self.model_name, legacy=True)
        if with_specials:
            tok.add_special_tokens(MODEL_SPECIAL_TOKENS)
        return tok

    @pytest.mark.unit
    def test_add_special_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        # The vocabulary grows by the number of *distinct* surface forms.
        growth = len(set(MODEL_SPECIAL_TOKENS.values()))
        assert tok.vocab_size == tok.original_vocab_size + growth

    @pytest.mark.unit
    def test_text_to_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        toks = tok.text_to_tokens(self._TEXT)
        assert len(toks) == len(self._TEXT.split())
        assert toks.count("[CLS]") == 1
        assert toks.count("[MASK]") == 1
        assert toks.count("[SEP]") == 2

    @pytest.mark.unit
    def test_tokens_to_text(self, test_data_dir):
        # No special tokens registered here, mirroring the original test.
        tok = self._tokenizer(test_data_dir, with_specials=False)
        assert tok.tokens_to_text(tok.text_to_tokens(self._TEXT)) == self._TEXT

    @pytest.mark.unit
    def test_text_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        ids = tok.text_to_ids(self._TEXT)
        assert len(ids) == len(self._TEXT.split())
        assert ids.count(tok.token_to_id("[CLS]")) == 1
        assert ids.count(tok.token_to_id("[MASK]")) == 1
        assert ids.count(tok.token_to_id("[SEP]")) == 2

    @pytest.mark.unit
    def test_ids_to_text(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        assert tok.ids_to_text(tok.text_to_ids(self._TEXT)) == self._TEXT

    @pytest.mark.unit
    def test_tokens_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        toks = tok.text_to_tokens(self._TEXT)
        ids = tok.tokens_to_ids(toks)
        assert len(ids) == len(toks)
        assert ids.count(tok.token_to_id("[CLS]")) == 1
        assert ids.count(tok.token_to_id("[MASK]")) == 1
        assert ids.count(tok.token_to_id("[SEP]")) == 2

    @pytest.mark.unit
    def test_ids_to_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        toks = tok.text_to_tokens(self._TEXT)
        round_tripped = tok.ids_to_tokens(tok.tokens_to_ids(toks))
        assert len(round_tripped) == len(toks)
        for got, expected in zip(round_tripped, toks):
            assert got == expected
class TestSentencePieceTokenizer:
    """Tests for SentencePieceTokenizer in non-legacy mode.

    In the test model, <cls> is a user_defined_symbol while <unk>, <sep>,
    <s>, and </s> are control symbols (never produced from plain text).
    """

    model_name = "/m_new.model"

    _TEXT = "<cls> a b c <sep> e f g h i </s>"
    _TOKENS = ["<cls>", "a", "b", "c", "<sep>", "e", "f", "<sep>", "g", "h", "i", "</s>"]

    def _tokenizer(self, test_data_dir):
        """Build a non-legacy tokenizer from the shared test model."""
        return SentencePieceTokenizer(test_data_dir + self.model_name)

    @pytest.mark.unit
    def test_text_to_tokens(self, test_data_dir):
        toks = self._tokenizer(test_data_dir).text_to_tokens(self._TEXT)
        # Only the user-defined symbol survives tokenization of raw text.
        assert toks.count("<cls>") == 1
        assert toks.count("<sep>") == 0
        assert toks.count("</s>") == 0

    @pytest.mark.unit
    def test_tokens_to_text(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        text = "<cls> a b c e f g h i"
        assert tok.tokens_to_text(tok.text_to_tokens(text)) == text

    @pytest.mark.unit
    def test_text_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        ids = tok.text_to_ids(self._TEXT)
        assert ids.count(tok.token_to_id("<cls>")) == 1
        assert ids.count(tok.token_to_id("<sep>")) == 0
        assert ids.count(tok.token_to_id("</s>")) == 0

    @pytest.mark.unit
    def test_ids_to_text(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        assert tok.ids_to_text(tok.text_to_ids(self._TEXT)) == self._TEXT

    @pytest.mark.unit
    def test_tokens_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        ids = tok.tokens_to_ids(self._TOKENS)
        assert len(ids) == len(self._TOKENS)
        assert ids.count(tok.token_to_id("<cls>")) == 1
        assert ids.count(tok.token_to_id("</s>")) == 1
        assert ids.count(tok.token_to_id("<sep>")) == 2

    @pytest.mark.unit
    def test_ids_to_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        round_tripped = tok.ids_to_tokens(tok.tokens_to_ids(self._TOKENS))
        assert len(round_tripped) == len(self._TOKENS)
        for got, expected in zip(round_tripped, self._TOKENS):
            assert got == expected
class TestYouTokenToMeTokenizer:
    """Tests for YouTokenToMeTokenizer: meta tokens embedded in raw text
    are not recognized, but explicit token lists map to the reserved ids."""

    model_name = "/yttm.4096.en-de.model"

    _TOKENS = ["<BOS>", "a", "b", "c", "<UNK>", "e", "f", "<UNK>", "g", "h", "i", "<EOS>"]

    def _tokenizer(self, test_data_dir):
        """Build a tokenizer from the shared YTTM test model."""
        return YouTokenToMeTokenizer(test_data_dir + self.model_name)

    @pytest.mark.unit
    def test_text_to_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        toks = tok.text_to_tokens("<BOS> a b c e <UNK> f g h i <EOS>")
        # Meta tokens in plain text are treated as ordinary characters.
        assert toks.count("<BOS>") == 0
        assert toks.count("<UNK>") == 0
        assert toks.count("<EOS>") == 0

    @pytest.mark.unit
    def test_tokens_to_text(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        text = "a b c e f g h i"
        assert tok.tokens_to_text(tok.text_to_tokens(text)) == text

    @pytest.mark.unit
    def test_text_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        ids = tok.text_to_ids("<BOS> a b c <UNK> e f g h i <EOS>")
        assert ids.count(tok.bos_id) == 0
        assert ids.count(tok.unk_id) == 0
        assert ids.count(tok.eos_id) == 0

    @pytest.mark.unit
    def test_ids_to_text(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        text = "a b c e f g h i"
        assert tok.ids_to_text(tok.text_to_ids(text)) == text

    @pytest.mark.unit
    def test_tokens_to_ids(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        ids = tok.tokens_to_ids(self._TOKENS)
        assert len(ids) == len(self._TOKENS)
        assert ids.count(tok.bos_id) == 1
        assert ids.count(tok.eos_id) == 1
        assert ids.count(tok.unk_id) == 2

    @pytest.mark.unit
    def test_ids_to_tokens(self, test_data_dir):
        tok = self._tokenizer(test_data_dir)
        round_tripped = tok.ids_to_tokens(tok.tokens_to_ids(self._TOKENS))
        assert len(round_tripped) == len(self._TOKENS)
        for got, expected in zip(round_tripped, self._TOKENS):
            assert got == expected
| 36.548507
| 104
| 0.646452
| 1,360
| 9,795
| 4.433824
| 0.102206
| 0.050415
| 0.06932
| 0.053566
| 0.832836
| 0.813433
| 0.789221
| 0.775954
| 0.763018
| 0.748259
| 0
| 0.005155
| 0.227667
| 9,795
| 267
| 105
| 36.685393
| 0.791937
| 0.086779
| 0
| 0.747253
| 0
| 0
| 0.09153
| 0.002465
| 0
| 0
| 0
| 0
| 0.247253
| 1
| 0.104396
| false
| 0
| 0.016484
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16f6956bf25a2e0cdba859005e5bea2765d980e0
| 3,608
|
py
|
Python
|
2A_Working_with_Data/3_Using_datasets.py
|
aditagrawal/Azure_ML
|
30d7021c19aef8f56b05580cba25d38a5bc0b24e
|
[
"Unlicense"
] | null | null | null |
2A_Working_with_Data/3_Using_datasets.py
|
aditagrawal/Azure_ML
|
30d7021c19aef8f56b05580cba25d38a5bc0b24e
|
[
"Unlicense"
] | null | null | null |
2A_Working_with_Data/3_Using_datasets.py
|
aditagrawal/Azure_ML
|
30d7021c19aef8f56b05580cba25d38a5bc0b24e
|
[
"Unlicense"
] | null | null | null |
# Reference notes: passing Azure ML datasets to experiment scripts.
# NOTE(review): these are standalone illustrative snippets, not one runnable
# script -- names such as tab_ds, file_ds, parser, Environment,
# CondaDependencies and ScriptRunConfig are assumed to be defined/imported
# by the surrounding context; `env`/`packages`/`script_config` are
# deliberately re-assigned per snippet.
### Work with tabular datasets-
df = tab_ds.to_pandas_dataframe()
# code to work with dataframe goes here, for example:
print(df.head())
### Pass a tabular dataset to an experiment script
## Use a script argument for a tabular dataset
# ScriptRunConfig:
env = Environment('my_env')
# azureml-dataprep[pandas] is required for to_pandas_dataframe() on the remote target.
packages = CondaDependencies.create(conda_packages=['pip'],
                                    pip_packages=['azureml-defaults',
                                                  'azureml-dataprep[pandas]'])
env.python.conda_dependencies = packages
# Passing the dataset object as a plain argument serializes its id.
script_config = ScriptRunConfig(source_directory='my_dir',
                                script='script.py',
                                arguments=['--ds', tab_ds],
                                environment=env)
# Script:
from azureml.core import Run, Dataset
# The argument arrives as the dataset id string; look the dataset up by id.
parser.add_argument('--ds', type=str, dest='dataset_id')
args = parser.parse_args()
run = Run.get_context()
ws = run.experiment.workspace
dataset = Dataset.get_by_id(ws, id=args.dataset_id)
data = dataset.to_pandas_dataframe()
## Use a named input for a tabular dataset
# ScriptRunConfig:
env = Environment('my_env')
packages = CondaDependencies.create(conda_packages=['pip'],
                                    pip_packages=['azureml-defaults',
                                                  'azureml-dataprep[pandas]'])
env.python.conda_dependencies = packages
# as_named_input registers the dataset so the script can fetch it by name.
script_config = ScriptRunConfig(source_directory='my_dir',
                                script='script.py',
                                arguments=['--ds', tab_ds.as_named_input('my_dataset')],
                                environment=env)
# Script:
from azureml.core import Run
parser.add_argument('--ds', type=str, dest='ds_id')
args = parser.parse_args()
run = Run.get_context()
# Named inputs are resolved via the run context instead of the raw argument.
dataset = run.input_datasets['my_dataset']
data = dataset.to_pandas_dataframe()
### Work with file datasets
# to_path() yields the file paths contained in the dataset.
for file_path in file_ds.to_path():
    print(file_path)
### Pass a file dataset to an experiment script
## Use a script argument for a file dataset
# ScriptRunConfig:
env = Environment('my_env')
packages = CondaDependencies.create(conda_packages=['pip'],
                                    pip_packages=['azureml-defaults',
                                                  'azureml-dataprep[pandas]'])
env.python.conda_dependencies = packages
# as_download() materializes the files locally; the argument becomes the local path.
script_config = ScriptRunConfig(source_directory='my_dir',
                                script='script.py',
                                arguments=['--ds', file_ds.as_download()],
                                environment=env)
# Script:
from azureml.core import Run
import glob
parser.add_argument('--ds', type=str, dest='ds_ref')
args = parser.parse_args()
run = Run.get_context()
imgs = glob.glob(args.ds_ref + "/*.jpg")
## Use a named input for a file dataset
# ScriptRunConfig:
env = Environment('my_env')
packages = CondaDependencies.create(conda_packages=['pip'],
                                    pip_packages=['azureml-defaults',
                                                  'azureml-dataprep[pandas]'])
env.python.conda_dependencies = packages
script_config = ScriptRunConfig(source_directory='my_dir',
                                script='script.py',
                                arguments=['--ds', file_ds.as_named_input('my_ds').as_download()],
                                environment=env)
# Script:
from azureml.core import Run
import glob
parser.add_argument('--ds', type=str, dest='ds_ref')
args = parser.parse_args()
run = Run.get_context()
# The named input resolves to the local download path (a string).
dataset = run.input_datasets['my_ds']
imgs= glob.glob(dataset + "/*.jpg")
| 30.319328
| 98
| 0.598947
| 400
| 3,608
| 5.215
| 0.185
| 0.017258
| 0.047939
| 0.069032
| 0.848514
| 0.809204
| 0.795781
| 0.7814
| 0.723873
| 0.705177
| 0
| 0
| 0.284922
| 3,608
| 118
| 99
| 30.576271
| 0.808527
| 0.126109
| 0
| 0.742424
| 0
| 0
| 0.114313
| 0.03074
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5bfe77e78267f578e3c2adee5accdfac30bfc6c
| 8,174
|
py
|
Python
|
tests/test_optionStrategy.py
|
IanMadlenya/kaleidoscope
|
be3e28c1b4b958f81d8d67df03abebbebcf7965a
|
[
"MIT"
] | 1
|
2020-08-30T15:29:04.000Z
|
2020-08-30T15:29:04.000Z
|
tests/test_optionStrategy.py
|
IanMadlenya/kaleidoscope
|
be3e28c1b4b958f81d8d67df03abebbebcf7965a
|
[
"MIT"
] | null | null | null |
tests/test_optionStrategy.py
|
IanMadlenya/kaleidoscope
|
be3e28c1b4b958f81d8d67df03abebbebcf7965a
|
[
"MIT"
] | 2
|
2019-09-18T07:13:32.000Z
|
2020-11-20T18:15:18.000Z
|
from unittest import TestCase
from kaleidoscope.options.option_query import OptionQuery
from kaleidoscope.options.option_strategies import OptionStrategies
from kaleidoscope.options.option_strategy import OptionStrategy
from kaleidoscope.options.option import Option
from kaleidoscope.globals import OptionType
from kaleidoscope.datafeeds.sqlite_data import SQLiteDataFeed
class TestOptionStrategy(TestCase):
    """Unit tests for OptionStrategy: strike-width computation, option-symbol
    to leg mapping, and nearest-mark strategy selection."""

    def setUp(self):
        # A bare strategy with no chains; each test fills in the attributes
        # it needs (strikes, legs) directly.
        self.op_test = OptionStrategy(chains=None)

    def test_max_strike_width_one_strike(self):
        # A single strike has no spread, so the width is 0.
        self.op_test.strikes = [1]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(0, max_width)

    def test_max_strike_width_two_strikes(self):
        self.op_test.strikes = [1, 2.5]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(1.5, max_width)

    def test_max_strike_width_three_strikes(self):
        # With [1, 2.5, 4] the result is 1.5; with [1, 2.5, 5] it is 2.5
        # -- presumably the larger of the two adjacent gaps; confirm against
        # the _max_strike_width implementation.
        self.op_test.strikes = [1, 2.5, 4]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(1.5, max_width)
        self.op_test.strikes = [1, 2.5, 5]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(2.5, max_width)

    def test_max_strike_width_four_strikes(self):
        self.op_test.strikes = [1, 2.5, 7, 5.5]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(1.5, max_width)
        self.op_test.strikes = [1, 2.5, 7.5, 5.5]
        max_width = self.op_test._max_strike_width()
        self.assertEqual(2, max_width)

    def test_max_strike_width_invalid_strikes(self):
        # Five strikes and an empty strike list are both rejected.
        self.op_test.strikes = [1, 2.5, 4, 5.5, 5]
        self.assertRaises(ValueError, lambda: self.op_test._max_strike_width())
        self.op_test.strikes = []
        self.assertRaises(ValueError, lambda: self.op_test._max_strike_width())

    def test_map_vertical(self):
        # test case for call spread: long 30C / short 35C
        sym_1 = ".VXX160219C00030000-.VXX160219C00035000"
        self.op_test.legs = self.op_test._map(sym_1)
        # expected value
        # NOTE(review): s_option/l_option are never compared to the mapped
        # legs below -- they only exercise the Option constructor.
        s_option = Option("VXX", "160219", "C", "00030000")
        l_option = Option("VXX", "160219", "C", "00035000")
        # test option leg: leading '.' => +1 quantity, '-' prefix => -1
        self.assertEqual(len(self.op_test.legs), 2)
        self.assertEqual(1, self.op_test.legs[0]['quantity'])
        self.assertEqual(-1, self.op_test.legs[1]['quantity'])
        # test option in option legs
        self.assertEqual("VXX160219C00030000", self.op_test.legs[0]['contract'].symbol)
        self.assertEqual("VXX160219C00035000", self.op_test.legs[1]['contract'].symbol)
        self.assertEqual("2016-02-19", self.op_test.legs[0]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[1]['contract'].expiration)
        self.assertEqual("C", self.op_test.legs[0]['contract'].option_type)
        self.assertEqual("C", self.op_test.legs[1]['contract'].option_type)
        self.assertEqual(30.0, self.op_test.legs[0]['contract'].strike)
        self.assertEqual(35.0, self.op_test.legs[1]['contract'].strike)

    def test_map_butterfly(self):
        # test case for call butterfly: +1/-2/+1 across 30/35/40 calls
        sym_1 = ".VXX160219C00030000-2*.VXX160219C00035000+.VXX160219C00040000"
        self.op_test.legs = self.op_test._map(sym_1)
        # expected value
        # NOTE(review): l1/s/l2_option are unused (see test_map_vertical).
        l1_option = Option("VXX", "160219", "C", "00030000")
        s_option = Option("VXX", "160219", "C", "00035000")
        l2_option = Option("VXX", "160219", "C", "00040000")
        # test option leg: the '2*' multiplier yields quantity -2
        self.assertEqual(len(self.op_test.legs), 3)
        self.assertEqual(1, self.op_test.legs[0]['quantity'])
        self.assertEqual(-2, self.op_test.legs[1]['quantity'])
        self.assertEqual(1, self.op_test.legs[2]['quantity'])
        # test option in option legs
        self.assertEqual("VXX160219C00030000", self.op_test.legs[0]['contract'].symbol)
        self.assertEqual("VXX160219C00035000", self.op_test.legs[1]['contract'].symbol)
        self.assertEqual("VXX160219C00040000", self.op_test.legs[2]['contract'].symbol)
        self.assertEqual("2016-02-19", self.op_test.legs[0]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[1]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[2]['contract'].expiration)
        self.assertEqual("C", self.op_test.legs[0]['contract'].option_type)
        self.assertEqual("C", self.op_test.legs[1]['contract'].option_type)
        self.assertEqual("C", self.op_test.legs[2]['contract'].option_type)
        self.assertEqual(30.0, self.op_test.legs[0]['contract'].strike)
        self.assertEqual(35.0, self.op_test.legs[1]['contract'].strike)
        self.assertEqual(40.0, self.op_test.legs[2]['contract'].strike)

    def test_map_iron_condor(self):
        # test case for iron condor: call spread (30/35) + put spread (45/40)
        sym_1 = ".VXX160219C00030000-.VXX160219C00035000+.VXX160219P00045000-.VXX160219P00040000"
        self.op_test.legs = self.op_test._map(sym_1)
        # expected value
        # NOTE(review): l1/s1/l2/s2_option are unused (see test_map_vertical).
        l1_option = Option("VXX", "160219", "C", "00030000")
        s1_option = Option("VXX", "160219", "C", "00035000")
        l2_option = Option("VXX", "160219", "P", "00045000")
        s2_option = Option("VXX", "160219", "P", "00040000")
        # test option leg
        self.assertEqual(len(self.op_test.legs), 4)
        self.assertEqual(1, self.op_test.legs[0]['quantity'])
        self.assertEqual(-1, self.op_test.legs[1]['quantity'])
        self.assertEqual(1, self.op_test.legs[2]['quantity'])
        self.assertEqual(-1, self.op_test.legs[3]['quantity'])
        # test option in option legs
        self.assertEqual("VXX160219C00030000", self.op_test.legs[0]['contract'].symbol)
        self.assertEqual("VXX160219C00035000", self.op_test.legs[1]['contract'].symbol)
        self.assertEqual("VXX160219P00045000", self.op_test.legs[2]['contract'].symbol)
        self.assertEqual("VXX160219P00040000", self.op_test.legs[3]['contract'].symbol)
        self.assertEqual("2016-02-19", self.op_test.legs[0]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[1]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[2]['contract'].expiration)
        self.assertEqual("2016-02-19", self.op_test.legs[3]['contract'].expiration)
        self.assertEqual("C", self.op_test.legs[0]['contract'].option_type)
        self.assertEqual("C", self.op_test.legs[1]['contract'].option_type)
        self.assertEqual("P", self.op_test.legs[2]['contract'].option_type)
        self.assertEqual("P", self.op_test.legs[3]['contract'].option_type)
        self.assertEqual(30.0, self.op_test.legs[0]['contract'].strike)
        self.assertEqual(35.0, self.op_test.legs[1]['contract'].strike)
        self.assertEqual(45.0, self.op_test.legs[2]['contract'].strike)
        self.assertEqual(40.0, self.op_test.legs[3]['contract'].strike)

    def test_nearest_mark_vertical(self):
        # NOTE(review): requires the VXX quote data that SQLiteDataFeed reads
        # from its default database; this is an integration-style test.
        self.datafeed = SQLiteDataFeed()
        self.data = self.datafeed.get("VXX", start="2016-02-19", end="2016-02-19")
        # filter the data for one quote date
        data = self.data.loc[self.data['quote_date'] == "2016-02-19"]
        chains = OptionStrategies.vertical(OptionQuery(data), width=2, option_type=OptionType.CALL)
        result = chains.nearest_mark(0.5)
        self.assertEqual(result.expirations, ["2016-02-19"])
        self.assertEqual(result.strikes, [25.0, 27.0])
        self.assertEqual(result.underlying_symbol, "VXX")
        self.assertEqual(result.name, "Vertical")

    def test_nearest_mark_iron_condor(self):
        # initialize OptionStrategy class
        # NOTE(review): same data-fixture dependency as test_nearest_mark_vertical.
        datafeed = SQLiteDataFeed()
        data = datafeed.get("VXX", start="2016-02-19", end="2016-02-19")
        # filter the data for one quote date
        data = data.loc[data['quote_date'] == "2016-02-19"]
        chains = OptionStrategies.iron_condor(OptionQuery(data), width=2, c_width=2, p_width=2)
        result = chains.nearest_mark(0.5)
        self.assertEqual(result.expirations, ["2016-02-19"])
        self.assertEqual(result.strikes, [25.0, 27.0, 23.0, 21.0])
        self.assertEqual(result.underlying_symbol, "VXX")
        self.assertEqual(result.name, "Iron Condor")
| 45.921348
| 99
| 0.667115
| 1,107
| 8,174
| 4.757001
| 0.103884
| 0.080896
| 0.134827
| 0.135587
| 0.838207
| 0.809153
| 0.788264
| 0.776491
| 0.738891
| 0.710976
| 0
| 0.101343
| 0.180328
| 8,174
| 177
| 100
| 46.180791
| 0.684627
| 0.043186
| 0
| 0.447154
| 0
| 0
| 0.140038
| 0.022934
| 0
| 0
| 0
| 0
| 0.520325
| 1
| 0.089431
| false
| 0
| 0.056911
| 0
| 0.154472
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5d5ea6a51a639e87396db06c12ce432a69501fa
| 64
|
py
|
Python
|
src/__init__.py
|
meryemcode/Why-churn
|
ba19e52bf5b9786e955da3d03f99226183cd6c33
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
meryemcode/Why-churn
|
ba19e52bf5b9786e955da3d03f99226183cd6c33
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
meryemcode/Why-churn
|
ba19e52bf5b9786e955da3d03f99226183cd6c33
|
[
"MIT"
] | null | null | null |
from src import app
from src import data
from src import models
| 16
| 22
| 0.8125
| 12
| 64
| 4.333333
| 0.5
| 0.403846
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 64
| 3
| 23
| 21.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e5d9e66f608105b51e3ece23d61da1bddd128119
| 154
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_0/_pkg1_1_1_0_1/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._mod1_1_1_0_1_0 import *
from ._mod1_1_1_0_1_1 import *
from ._mod1_1_1_0_1_2 import *
from ._mod1_1_1_0_1_3 import *
from ._mod1_1_1_0_1_4 import *
| 30.8
| 30
| 0.811688
| 40
| 154
| 2.375
| 0.2
| 0.126316
| 0.473684
| 0.526316
| 0.884211
| 0.884211
| 0.757895
| 0
| 0
| 0
| 0
| 0.222222
| 0.123377
| 154
| 5
| 31
| 30.8
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
f91fd4885dcc4cc4e17efdc47c198aab05629de3
| 1,183
|
py
|
Python
|
Profiles/models.py
|
CHESyrian/Syrians
|
8376e9bed6e3a03f536d8aacd523d630f6bc4345
|
[
"MIT"
] | null | null | null |
Profiles/models.py
|
CHESyrian/Syrians
|
8376e9bed6e3a03f536d8aacd523d630f6bc4345
|
[
"MIT"
] | null | null | null |
Profiles/models.py
|
CHESyrian/Syrians
|
8376e9bed6e3a03f536d8aacd523d630f6bc4345
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from My_Account.models import Sharing_Post, Sharing_Image
class Posts_Good( models.Model ):
Post = models.ForeignKey(Sharing_Post, on_delete=models.CASCADE, null=True)
username = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
Good_Date = models.DateTimeField(auto_now=False, auto_now_add=True)
def __str__(self):
return self.Post.id, self.username.username
class Posts_Amazing( models.Model ):
Post = models.ForeignKey(Sharing_Post, on_delete=models.CASCADE, null=True)
username = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
Amazing_Date = models.DateTimeField(auto_now=False, auto_now_add=True)
def __str__(self):
return self.Post.id, self.username.username
class Posts_Shares(models.Model):
Post = models.ForeignKey(Sharing_Post, on_delete=models.CASCADE, null=True)
username = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
Share_Date = models.DateTimeField(auto_now=False, auto_now_add=True)
def __str__(self):
return self.Post.id, self.username.username
| 39.433333
| 87
| 0.743872
| 162
| 1,183
| 5.191358
| 0.234568
| 0.11415
| 0.099881
| 0.149822
| 0.819263
| 0.819263
| 0.819263
| 0.819263
| 0.819263
| 0.819263
| 0
| 0
| 0.157227
| 1,183
| 30
| 88
| 39.433333
| 0.843531
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
0060f5cf1843bf947fa2f51aca0a6aae6727ccf2
| 59,792
|
py
|
Python
|
dohq_teamcity/api/build_queue_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 23
|
2018-10-19T07:28:45.000Z
|
2021-11-12T12:46:09.000Z
|
dohq_teamcity/api/build_queue_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 31
|
2018-10-16T05:53:11.000Z
|
2021-09-09T14:44:14.000Z
|
dohq_teamcity/api/build_queue_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 12
|
2018-10-28T23:00:17.000Z
|
2021-09-07T12:07:13.000Z
|
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2018.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.agents import Agents # noqa: F401,E501
from dohq_teamcity.models.build import Build # noqa: F401,E501
from dohq_teamcity.models.build_cancel_request import BuildCancelRequest # noqa: F401,E501
from dohq_teamcity.models.builds import Builds # noqa: F401,E501
from dohq_teamcity.models.tags import Tags # noqa: F401,E501
class BuildQueueApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
base_name = 'BuildQueue'
def __init__(self, api_client=None):
self.api_client = api_client
def add_tags(self, build_locator, **kwargs): # noqa: E501
"""add_tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_tags(build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str build_locator: (required)
:param Tags body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_tags_with_http_info(build_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_tags_with_http_info(build_locator, **kwargs) # noqa: E501
return data
def cancel_build(self, build_locator, **kwargs): # noqa: E501
"""cancel_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_build(build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str build_locator: (required)
:return: BuildCancelRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__cancel_build_with_http_info(build_locator, **kwargs) # noqa: E501
else:
(data) = self.__cancel_build_with_http_info(build_locator, **kwargs) # noqa: E501
return data
def cancel_build_0(self, queued_build_locator, **kwargs): # noqa: E501
"""cancel_build_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_build_0(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queued_build_locator: (required)
:param BuildCancelRequest body:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__cancel_build_0_with_http_info(queued_build_locator, **kwargs) # noqa: E501
else:
(data) = self.__cancel_build_0_with_http_info(queued_build_locator, **kwargs) # noqa: E501
return data
def delete_build(self, queued_build_locator, **kwargs): # noqa: E501
"""delete_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_build(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queued_build_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_build_with_http_info(queued_build_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_build_with_http_info(queued_build_locator, **kwargs) # noqa: E501
return data
def delete_builds_experimental(self, **kwargs): # noqa: E501
"""delete_builds_experimental # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_builds_experimental(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str locator:
:param str fields:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_builds_experimental_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__delete_builds_experimental_with_http_info(**kwargs) # noqa: E501
return data
def get_build(self, queued_build_locator, **kwargs): # noqa: E501
"""get_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_build(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queued_build_locator: (required)
:param str fields:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_build_with_http_info(queued_build_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_build_with_http_info(queued_build_locator, **kwargs) # noqa: E501
return data
def get_builds(self, **kwargs): # noqa: E501
"""get_builds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_builds(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str locator:
:param str fields:
:return: Builds
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_builds_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__get_builds_with_http_info(**kwargs) # noqa: E501
return data
def queue_new_build(self, **kwargs): # noqa: E501
"""queue_new_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.queue_new_build(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param Build body:
:param bool move_to_top:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__queue_new_build_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__queue_new_build_with_http_info(**kwargs) # noqa: E501
return data
def replace_builds(self, **kwargs): # noqa: E501
"""replace_builds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_builds(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param Builds body:
:param str fields:
:return: Builds
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__replace_builds_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__replace_builds_with_http_info(**kwargs) # noqa: E501
return data
def replace_tags(self, build_locator, **kwargs): # noqa: E501
"""replace_tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_tags(build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str build_locator: (required)
:param str locator:
:param Tags body:
:param str fields:
:return: Tags
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__replace_tags_with_http_info(build_locator, **kwargs) # noqa: E501
else:
(data) = self.__replace_tags_with_http_info(build_locator, **kwargs) # noqa: E501
return data
def serve_build_field_by_build_only(self, build_locator, field, **kwargs): # noqa: E501
"""serve_build_field_by_build_only # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.serve_build_field_by_build_only(build_locator, field, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str build_locator: (required)
:param str field: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__serve_build_field_by_build_only_with_http_info(build_locator, field, **kwargs) # noqa: E501
else:
(data) = self.__serve_build_field_by_build_only_with_http_info(build_locator, field, **kwargs) # noqa: E501
return data
def serve_compatible_agents(self, queued_build_locator, **kwargs): # noqa: E501
"""serve_compatible_agents # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.serve_compatible_agents(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queued_build_locator: (required)
:param str fields:
:return: Agents
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__serve_compatible_agents_with_http_info(queued_build_locator, **kwargs) # noqa: E501
else:
(data) = self.__serve_compatible_agents_with_http_info(queued_build_locator, **kwargs) # noqa: E501
return data
def serve_tags(self, build_locator, **kwargs): # noqa: E501
"""serve_tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.serve_tags(build_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str build_locator: (required)
:param str locator:
:param str fields:
:return: Tags
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__serve_tags_with_http_info(build_locator, **kwargs) # noqa: E501
else:
(data) = self.__serve_tags_with_http_info(build_locator, **kwargs) # noqa: E501
return data
def set_build_queue_order(self, **kwargs): # noqa: E501
"""set_build_queue_order # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_build_queue_order(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param Builds body:
:param str fields:
:return: Builds
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__set_build_queue_order_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__set_build_queue_order_with_http_info(**kwargs) # noqa: E501
return data
def set_build_queue_position(self, queue_position, **kwargs): # noqa: E501
"""set_build_queue_position # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_build_queue_position(queue_position, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queue_position: (required)
:param str fields:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__set_build_queue_position_with_http_info(queue_position, **kwargs) # noqa: E501
else:
(data) = self.__set_build_queue_position_with_http_info(queue_position, **kwargs) # noqa: E501
return data
def set_build_queue_position_0(self, queue_position, **kwargs): # noqa: E501
"""set_build_queue_position_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_build_queue_position_0(queue_position, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str queue_position: (required)
:param Build body:
:param str fields:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__set_build_queue_position_0_with_http_info(queue_position, **kwargs) # noqa: E501
else:
(data) = self.__set_build_queue_position_0_with_http_info(queue_position, **kwargs) # noqa: E501
return data
def __add_tags_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""add_tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__add_tags_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:param Tags body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_tags" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `add_tags`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue/{buildLocator}/tags', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __cancel_build_with_http_info(self, build_locator, **kwargs): # noqa: E501
"""cancel_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__cancel_build_with_http_info(build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str build_locator: (required)
:return: BuildCancelRequest
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['build_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method cancel_build" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'build_locator' is set
if ('build_locator' not in params or
params['build_locator'] is None):
raise ValueError("Missing the required parameter `build_locator` when calling `cancel_build`") # noqa: E501
collection_formats = {}
path_params = {}
if 'build_locator' in params:
if isinstance(params['build_locator'], TeamCityObject):
path_params['buildLocator'] = params['build_locator'].locator_id
else:
path_params['buildLocator'] = params['build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue/{buildLocator}/example/buildCancelRequest', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BuildCancelRequest', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __cancel_build_0_with_http_info(self, queued_build_locator, **kwargs): # noqa: E501
"""cancel_build_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__cancel_build_0_with_http_info(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str queued_build_locator: (required)
:param BuildCancelRequest body:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['queued_build_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method cancel_build_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'queued_build_locator' is set
if ('queued_build_locator' not in params or
params['queued_build_locator'] is None):
raise ValueError("Missing the required parameter `queued_build_locator` when calling `cancel_build_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'queued_build_locator' in params:
if isinstance(params['queued_build_locator'], TeamCityObject):
path_params['queuedBuildLocator'] = params['queued_build_locator'].locator_id
else:
path_params['queuedBuildLocator'] = params['queued_build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue/{queuedBuildLocator}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Build', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_build_with_http_info(self, queued_build_locator, **kwargs): # noqa: E501
"""delete_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_build_with_http_info(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str queued_build_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['queued_build_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_build" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'queued_build_locator' is set
if ('queued_build_locator' not in params or
params['queued_build_locator'] is None):
raise ValueError("Missing the required parameter `queued_build_locator` when calling `delete_build`") # noqa: E501
collection_formats = {}
path_params = {}
if 'queued_build_locator' in params:
if isinstance(params['queued_build_locator'], TeamCityObject):
path_params['queuedBuildLocator'] = params['queued_build_locator'].locator_id
else:
path_params['queuedBuildLocator'] = params['queued_build_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue/{queuedBuildLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_builds_experimental_with_http_info(self, **kwargs): # noqa: E501
"""delete_builds_experimental # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_builds_experimental_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str locator:
:param str fields:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_builds_experimental" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_build_with_http_info(self, queued_build_locator, **kwargs): # noqa: E501
"""get_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_build_with_http_info(queued_build_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str queued_build_locator: (required)
:param str fields:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['queued_build_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_build" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'queued_build_locator' is set
if ('queued_build_locator' not in params or
params['queued_build_locator'] is None):
raise ValueError("Missing the required parameter `queued_build_locator` when calling `get_build`") # noqa: E501
collection_formats = {}
path_params = {}
if 'queued_build_locator' in params:
if isinstance(params['queued_build_locator'], TeamCityObject):
path_params['queuedBuildLocator'] = params['queued_build_locator'].locator_id
else:
path_params['queuedBuildLocator'] = params['queued_build_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue/{queuedBuildLocator}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Build', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_builds_with_http_info(self, **kwargs): # noqa: E501
"""get_builds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_builds_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str locator:
:param str fields:
:return: Builds
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_builds" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Builds', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __queue_new_build_with_http_info(self, **kwargs): # noqa: E501
"""queue_new_build # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__queue_new_build_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Build body:
:param bool move_to_top:
:return: Build
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'move_to_top'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method queue_new_build" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'move_to_top' in params:
query_params.append(('moveToTop', params['move_to_top'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildQueue', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Build', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __replace_builds_with_http_info(self, **kwargs):  # noqa: E501
    """replace_builds  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param Builds body:
    :param str fields:
    :return: Builds
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['body', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_builds" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}

    # Only forward query arguments the caller actually supplied.
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Builds',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_tags_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """replace_tags  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str build_locator: (required)
    :param str locator:
    :param Tags body:
    :param str fields:
    :return: Tags
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['build_locator', 'locator', 'body', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'build_locator': build_locator}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_tags" % name
            )
        params[name] = value

    # 'build_locator' is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `replace_tags`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    raw = params['build_locator']
    path_params['buildLocator'] = (
        raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    # Only forward query arguments the caller actually supplied.
    query_params = []
    for arg, wire in (('locator', 'locator'), ('fields', 'fields')):
        if arg in params:
            query_params.append((wire, params[arg]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/{buildLocator}/tags', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tags',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_field_by_build_only_with_http_info(self, build_locator, field, **kwargs):  # noqa: E501
    """serve_build_field_by_build_only  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str build_locator: (required)
    :param str field: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['build_locator', 'field',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'build_locator': build_locator, 'field': field}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_field_by_build_only" % name
            )
        params[name] = value

    # Both positional parameters are mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_field_by_build_only`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_build_field_by_build_only`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    for arg, wire in (('build_locator', 'buildLocator'), ('field', 'field')):
        raw = params[arg]
        path_params[wire] = (
            raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/{buildLocator}/{field}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_compatible_agents_with_http_info(self, queued_build_locator, **kwargs):  # noqa: E501
    """serve_compatible_agents  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str queued_build_locator: (required)
    :param str fields:
    :return: Agents
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['queued_build_locator', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'queued_build_locator': queued_build_locator}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_compatible_agents" % name
            )
        params[name] = value

    # 'queued_build_locator' is mandatory.
    if params.get('queued_build_locator') is None:
        raise ValueError("Missing the required parameter `queued_build_locator` when calling `serve_compatible_agents`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    raw = params['queued_build_locator']
    path_params['queuedBuildLocator'] = (
        raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    # Only forward query arguments the caller actually supplied.
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/{queuedBuildLocator}/compatibleAgents', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Agents',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_tags_with_http_info(self, build_locator, **kwargs):  # noqa: E501
    """serve_tags  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str build_locator: (required)
    :param str locator:
    :param str fields:
    :return: Tags
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['build_locator', 'locator', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'build_locator': build_locator}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_tags" % name
            )
        params[name] = value

    # 'build_locator' is mandatory.
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_tags`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    raw = params['build_locator']
    path_params['buildLocator'] = (
        raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    # Only forward query arguments the caller actually supplied.
    query_params = []
    for arg, wire in (('locator', 'locator'), ('fields', 'fields')):
        if arg in params:
            query_params.append((wire, params[arg]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/{buildLocator}/tags', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tags',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __set_build_queue_order_with_http_info(self, **kwargs):  # noqa: E501
    """set_build_queue_order  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param Builds body:
    :param str fields:
    :return: Builds
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['body', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_build_queue_order" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}

    # Only forward query arguments the caller actually supplied.
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/order', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Builds',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __set_build_queue_position_with_http_info(self, queue_position, **kwargs):  # noqa: E501
    """set_build_queue_position  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str queue_position: (required)
    :param str fields:
    :return: Build
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['queue_position', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'queue_position': queue_position}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_build_queue_position" % name
            )
        params[name] = value

    # 'queue_position' is mandatory.
    if params.get('queue_position') is None:
        raise ValueError("Missing the required parameter `queue_position` when calling `set_build_queue_position`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    raw = params['queue_position']
    path_params['queuePosition'] = (
        raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    # Only forward query arguments the caller actually supplied.
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/order/{queuePosition}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Build',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __set_build_queue_position_0_with_http_info(self, queue_position, **kwargs):  # noqa: E501
    """set_build_queue_position_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str queue_position: (required)
    :param Build body:
    :param str fields:
    :return: Build
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs plus the common transport controls.
    accepted = ['queue_position', 'body', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'queue_position': queue_position}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_build_queue_position_0" % name
            )
        params[name] = value

    # 'queue_position' is mandatory.
    if params.get('queue_position') is None:
        raise ValueError("Missing the required parameter `queue_position` when calling `set_build_queue_position_0`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are sent by their locator id.
    path_params = {}
    raw = params['queue_position']
    path_params['queuePosition'] = (
        raw.locator_id if isinstance(raw, TeamCityObject) else raw)

    # Only forward query arguments the caller actually supplied.
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # No extra authentication settings for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildQueue/order/{queuePosition}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Build',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 38.451447
| 139
| 0.605583
| 6,708
| 59,792
| 5.090936
| 0.026685
| 0.043104
| 0.035842
| 0.033734
| 0.974495
| 0.968814
| 0.966266
| 0.957833
| 0.952416
| 0.949048
| 0
| 0.014727
| 0.30387
| 59,792
| 1,554
| 140
| 38.47619
| 0.805732
| 0.286393
| 0
| 0.819015
| 1
| 0
| 0.19139
| 0.04965
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037801
| false
| 0
| 0.010309
| 0
| 0.105384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
007a66dc285844b90e6be768675ae0f3e0615fea
| 13,564
|
py
|
Python
|
common/build.py
|
JohnHillegass/celpy
|
a273133b0502d2d228ad5459e08b2463b2c8f10b
|
[
"BSD-3-Clause"
] | null | null | null |
common/build.py
|
JohnHillegass/celpy
|
a273133b0502d2d228ad5459e08b2463b2c8f10b
|
[
"BSD-3-Clause"
] | null | null | null |
common/build.py
|
JohnHillegass/celpy
|
a273133b0502d2d228ad5459e08b2463b2c8f10b
|
[
"BSD-3-Clause"
] | null | null | null |
# python build stubs for package common
# File is generated by gopy. Do not edit.
# gopy build -output=common github.com/google/cel-go/common
from pybindgen import retval, param, Module
import sys

# Declares the pybindgen Module that wraps the cgo-exported symbols from
# common_go.h and emits the C glue source (common.c) at the bottom.
mod = Module('_common')
mod.add_include('"common_go.h"')

# --- runtime / handle management helpers ---
mod.add_function('GoPyInit', None, [])
mod.add_function('DecRef', None, [param('int64_t', 'handle')])
mod.add_function('IncRef', None, [param('int64_t', 'handle')])
mod.add_function('NumHandles', retval('int'), [])

# --- Go slice wrappers: for each element type, a constructor plus
# --- len / elem / set / append operating on an int64 handle.
mod.add_function('Slice_bool_CTor', retval('int64_t'), [])
mod.add_function('Slice_bool_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_bool_elem', retval('bool'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_bool_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('bool', 'value')])
mod.add_function('Slice_bool_append', None, [param('int64_t', 'handle'), param('bool', 'value')])
mod.add_function('Slice_byte_CTor', retval('int64_t'), [])
mod.add_function('Slice_byte_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_byte_elem', retval('uint8_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_byte_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint8_t', 'value')])
mod.add_function('Slice_byte_append', None, [param('int64_t', 'handle'), param('uint8_t', 'value')])
mod.add_function('Slice_float32_CTor', retval('int64_t'), [])
mod.add_function('Slice_float32_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_float32_elem', retval('float'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_float32_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('float', 'value')])
mod.add_function('Slice_float32_append', None, [param('int64_t', 'handle'), param('float', 'value')])
mod.add_function('Slice_float64_CTor', retval('int64_t'), [])
mod.add_function('Slice_float64_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_float64_elem', retval('double'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_float64_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('double', 'value')])
mod.add_function('Slice_float64_append', None, [param('int64_t', 'handle'), param('double', 'value')])
mod.add_function('Slice_int_CTor', retval('int64_t'), [])
mod.add_function('Slice_int_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_int_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_int_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int64_t', 'value')])
mod.add_function('Slice_int_append', None, [param('int64_t', 'handle'), param('int64_t', 'value')])
mod.add_function('Slice_int16_CTor', retval('int64_t'), [])
mod.add_function('Slice_int16_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_int16_elem', retval('int16_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_int16_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int16_t', 'value')])
mod.add_function('Slice_int16_append', None, [param('int64_t', 'handle'), param('int16_t', 'value')])
mod.add_function('Slice_int32_CTor', retval('int64_t'), [])
mod.add_function('Slice_int32_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_int32_elem', retval('int32_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_int32_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int32_t', 'value')])
mod.add_function('Slice_int32_append', None, [param('int64_t', 'handle'), param('int32_t', 'value')])
mod.add_function('Slice_int64_CTor', retval('int64_t'), [])
mod.add_function('Slice_int64_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_int64_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_int64_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int64_t', 'value')])
mod.add_function('Slice_int64_append', None, [param('int64_t', 'handle'), param('int64_t', 'value')])
mod.add_function('Slice_int8_CTor', retval('int64_t'), [])
mod.add_function('Slice_int8_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_int8_elem', retval('int8_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_int8_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int8_t', 'value')])
mod.add_function('Slice_int8_append', None, [param('int64_t', 'handle'), param('int8_t', 'value')])
mod.add_function('Slice_rune_CTor', retval('int64_t'), [])
mod.add_function('Slice_rune_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_rune_elem', retval('int32_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_rune_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int32_t', 'value')])
mod.add_function('Slice_rune_append', None, [param('int64_t', 'handle'), param('int32_t', 'value')])
mod.add_function('Slice_string_CTor', retval('int64_t'), [])
mod.add_function('Slice_string_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_string_elem', retval('char*'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_string_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('char*', 'value')])
mod.add_function('Slice_string_append', None, [param('int64_t', 'handle'), param('char*', 'value')])
mod.add_function('Slice_uint_CTor', retval('int64_t'), [])
mod.add_function('Slice_uint_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_uint_elem', retval('uint64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_uint_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint64_t', 'value')])
mod.add_function('Slice_uint_append', None, [param('int64_t', 'handle'), param('uint64_t', 'value')])
mod.add_function('Slice_uint16_CTor', retval('int64_t'), [])
mod.add_function('Slice_uint16_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_uint16_elem', retval('uint16_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_uint16_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint16_t', 'value')])
mod.add_function('Slice_uint16_append', None, [param('int64_t', 'handle'), param('uint16_t', 'value')])
mod.add_function('Slice_uint32_CTor', retval('int64_t'), [])
mod.add_function('Slice_uint32_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_uint32_elem', retval('uint32_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_uint32_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint32_t', 'value')])
mod.add_function('Slice_uint32_append', None, [param('int64_t', 'handle'), param('uint32_t', 'value')])
mod.add_function('Slice_uint64_CTor', retval('int64_t'), [])
mod.add_function('Slice_uint64_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_uint64_elem', retval('uint64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_uint64_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint64_t', 'value')])
mod.add_function('Slice_uint64_append', None, [param('int64_t', 'handle'), param('uint64_t', 'value')])
mod.add_function('Slice_uint8_CTor', retval('int64_t'), [])
mod.add_function('Slice_uint8_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_uint8_elem', retval('uint8_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_uint8_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('uint8_t', 'value')])
mod.add_function('Slice_uint8_append', None, [param('int64_t', 'handle'), param('uint8_t', 'value')])
# Slices of pointer/struct types; elements are themselves int64 handles.
mod.add_function('Slice_Ptr_expr_Expr_CTor', retval('int64_t'), [])
mod.add_function('Slice_Ptr_expr_Expr_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_Ptr_expr_Expr_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_Ptr_expr_Expr_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int64_t', 'value')])
mod.add_function('Slice_Ptr_expr_Expr_append', None, [param('int64_t', 'handle'), param('int64_t', 'value')])
mod.add_function('Slice_Ptr_expr_Expr_CreateStruct_Entry_CTor', retval('int64_t'), [])
mod.add_function('Slice_Ptr_expr_Expr_CreateStruct_Entry_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_Ptr_expr_Expr_CreateStruct_Entry_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_Ptr_expr_Expr_CreateStruct_Entry_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int64_t', 'value')])
mod.add_function('Slice_Ptr_expr_Expr_CreateStruct_Entry_append', None, [param('int64_t', 'handle'), param('int64_t', 'value')])
mod.add_function('Slice_common_Error_CTor', retval('int64_t'), [])
mod.add_function('Slice_common_Error_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Slice_common_Error_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int', 'idx')])
mod.add_function('Slice_common_Error_set', None, [param('int64_t', 'handle'), param('int', 'idx'), param('int64_t', 'value')])
mod.add_function('Slice_common_Error_append', None, [param('int64_t', 'handle'), param('int64_t', 'value')])
# --- Go map wrappers: CTor / len / elem / contains / set / delete / keys.
mod.add_function('Map_int64_Ptr_expr_Expr_CTor', retval('int64_t'), [])
mod.add_function('Map_int64_Ptr_expr_Expr_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Map_int64_Ptr_expr_Expr_elem', retval('int64_t'), [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_Ptr_expr_Expr_contains', retval('bool'), [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_Ptr_expr_Expr_set', None, [param('int64_t', 'handle'), param('int64_t', 'key'), param('int64_t', 'value')])
mod.add_function('Map_int64_Ptr_expr_Expr_delete', None, [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_Ptr_expr_Expr_keys', retval('int64_t'), [param('int64_t', 'handle')])
mod.add_function('Map_int64_int32_CTor', retval('int64_t'), [])
mod.add_function('Map_int64_int32_len', retval('int'), [param('int64_t', 'handle')])
mod.add_function('Map_int64_int32_elem', retval('int32_t'), [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_int32_contains', retval('bool'), [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_int32_set', None, [param('int64_t', 'handle'), param('int64_t', 'key'), param('int32_t', 'value')])
mod.add_function('Map_int64_int32_delete', None, [param('int64_t', 'handle'), param('int64_t', '_ky')])
mod.add_function('Map_int64_int32_keys', retval('int64_t'), [param('int64_t', 'handle')])
# --- exported functions / methods of the cel-go "common" package ---
mod.add_function('common_NoLocation', retval('int64_t'), [])
mod.add_function('common_Set_NoLocation', None, [param('int64_t', 'val')])
mod.add_function('common_Location_Column', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_Location_Line', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_Source_Content', retval('char*'), [param('int64_t', '_handle')])
mod.add_function('common_Source_Description', retval('char*'), [param('int64_t', '_handle')])
mod.add_function('common_Source_LineOffsets', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_Source_NewLocation', retval('int64_t'), [param('int64_t', '_handle'), param('int64_t', 'line'), param('int64_t', 'col')])
mod.add_function('common_Errors_CTor', retval('int64_t'), [])
mod.add_function('common_Errors_GetErrors', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_Errors_Append', retval('int64_t'), [param('int64_t', '_handle'), param('int64_t', 'errs')])
mod.add_function('common_Errors_ToDisplayString', retval('char*'), [param('int64_t', '_handle')])
mod.add_function('common_SourceLocation_CTor', retval('int64_t'), [])
mod.add_function('common_SourceLocation_Line', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_SourceLocation_Column', retval('int64_t'), [param('int64_t', '_handle')])
mod.add_function('common_Error_CTor', retval('int64_t'), [])
mod.add_function('common_Error_Location_Get', retval('int64_t'), [param('int64_t', 'handle')])
mod.add_function('common_Error_Location_Set', None, [param('int64_t', 'handle'), param('int64_t', 'val')])
mod.add_function('common_Error_Message_Get', retval('char*'), [param('int64_t', 'handle')])
mod.add_function('common_Error_Message_Set', None, [param('int64_t', 'handle'), param('char*', 'val')])
mod.add_function('common_Error_ToDisplayString', retval('char*'), [param('int64_t', '_handle'), param('int64_t', 'source')])
mod.add_function('common_NewErrors', retval('int64_t'), [param('int64_t', 'source')])
mod.add_function('common_NewInfoSource', retval('int64_t'), [param('int64_t', 'info')])
mod.add_function('common_NewLocation', retval('int64_t'), [param('int64_t', 'line'), param('int64_t', 'column')])
mod.add_function('common_NewStringSource', retval('int64_t'), [param('char*', 'contents'), param('char*', 'description')])
mod.add_function('common_NewTextSource', retval('int64_t'), [param('char*', 'text')])
# Emit the C glue source. Use a context manager so the handle is flushed
# and closed deterministically (the original passed a bare open() whose
# handle was never closed).
with open('common.c', 'w') as out:
    mod.generate(out)
| 89.236842
| 146
| 0.708272
| 1,996
| 13,564
| 4.45992
| 0.053106
| 0.122669
| 0.218603
| 0.202426
| 0.93024
| 0.886318
| 0.863514
| 0.773309
| 0.647944
| 0.622669
| 0
| 0.04302
| 0.055736
| 13,564
| 151
| 147
| 89.827815
| 0.652014
| 0.009953
| 0
| 0
| 1
| 0
| 0.415524
| 0.08306
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013889
| 0
| 0.013889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
00813504da6e39fda4e95782e3b16bf4e1cdff99
| 126
|
py
|
Python
|
network-error-logging/support/redirect.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 8
|
2019-04-09T21:13:05.000Z
|
2021-11-23T17:25:18.000Z
|
network-error-logging/support/redirect.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 21
|
2021-03-31T19:48:22.000Z
|
2022-03-12T00:24:53.000Z
|
network-error-logging/support/redirect.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 11
|
2019-04-12T01:20:16.000Z
|
2021-11-23T17:25:02.000Z
|
# Always redirects to no-policy-pass.png.
def main(request, response):
    """wptserve handler: unconditionally 302-redirect to no-policy-pass.png."""
    status = 302
    headers = [("Location", "no-policy-pass.png")]
    body = ""
    return status, headers, body
| 31.5
| 54
| 0.68254
| 18
| 126
| 4.777778
| 0.777778
| 0.186047
| 0.27907
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.119048
| 126
| 3
| 55
| 42
| 0.747748
| 0.309524
| 0
| 0
| 0
| 0
| 0.305882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
008538a21cf502bad07242144abbe1c659c056fa
| 10,984
|
py
|
Python
|
pycsmaca/simulations/shortcuts.py
|
larioandr/pycsmaca
|
3788c4f5d1f7adace3ceedbd707a14226a1e8713
|
[
"MIT"
] | 1
|
2021-01-17T15:49:03.000Z
|
2021-01-17T15:49:03.000Z
|
pycsmaca/simulations/shortcuts.py
|
larioandr/pycsmaca
|
3788c4f5d1f7adace3ceedbd707a14226a1e8713
|
[
"MIT"
] | null | null | null |
pycsmaca/simulations/shortcuts.py
|
larioandr/pycsmaca
|
3788c4f5d1f7adace3ceedbd707a14226a1e8713
|
[
"MIT"
] | 1
|
2021-03-16T01:52:51.000Z
|
2021-03-16T01:52:51.000Z
|
from collections import namedtuple
from .wireless_networks import CollisionDomainNetwork, \
CollisionDomainSaturatedNetwork, WirelessHalfDuplexLineNetwork
from .wired_networks import WiredLineNetwork
from pydesim import simulate, Logger
SPEED_OF_LIGHT = 299792458.0
def collision_domain_network(
        num_clients, payload_size, source_interval, ack_size, mac_header_size,
        phy_header_size, preamble, bitrate, difs, sifs, slot, cwmin, cwmax,
        queue_capacity=None, connection_radius=100,
        speed_of_light=SPEED_OF_LIGHT, sim_time_limit=1000,
        log_level=Logger.Level.INFO):
    """Run a `CollisionDomainNetwork` simulation and collect its statistics.

    The network has one server plus ``num_clients`` client stations.  The
    raw simulation output is repackaged into lightweight namedtuples.

    Returns a ``SimRet(clients, server, network)`` namedtuple: ``clients``
    is a list of per-client ``Client`` records, ``server`` a ``Server``
    record, and ``network`` the raw simulation data object.
    """
    sim = simulate(
        CollisionDomainNetwork,
        stime_limit=sim_time_limit,
        params={
            'num_stations': (num_clients + 1),  # clients plus one server
            'payload_size': payload_size,
            'source_interval': source_interval,
            'mac_header_size': mac_header_size,
            'phy_header_size': phy_header_size,
            'ack_size': ack_size,
            'preamble': preamble,
            'bitrate': bitrate,
            'difs': difs,
            'sifs': sifs,
            'slot': slot,
            'cwmin': cwmin,
            'cwmax': cwmax,
            'connection_radius': connection_radius,
            'speed_of_light': speed_of_light,
            'queue_capacity': queue_capacity,
        },
        loglevel=log_level,
    )
    SimRet = namedtuple('SimRet', ['clients', 'server', 'network'])
    Client = namedtuple('Client', [
        'service_time', 'num_retries', 'queue_size', 'busy',
        'source_intervals', 'num_packets_sent', 'queue_drop_ratio',
        'queue_wait',
    ])
    Server = namedtuple('Server', [
        'arrival_intervals', 'num_rx_collided', 'num_rx_success',
        'num_packets_received', 'collision_ratio',
    ])
    # Per-client statistics come from each client's first (only) interface.
    clients = []
    for node in sim.data.clients:
        iface = node.interfaces[0]
        clients.append(Client(
            service_time=iface.transmitter.service_time,
            num_retries=iface.transmitter.num_retries_vector,
            queue_size=iface.queue.size_trace,
            busy=iface.transmitter.busy_trace,
            source_intervals=node.source.arrival_intervals.statistic(),
            num_packets_sent=iface.transmitter.num_sent,
            queue_drop_ratio=iface.queue.drop_ratio,
            queue_wait=iface.queue.wait_intervals,
        ))
    server_node = sim.data.server
    server_iface = server_node.interfaces[0]
    server = Server(
        arrival_intervals=server_node.sink.arrival_intervals.statistic(),
        num_rx_collided=server_iface.receiver.num_collisions,
        num_rx_success=server_iface.receiver.num_received,
        num_packets_received=server_node.sink.num_packets_received,
        collision_ratio=server_iface.receiver.collision_ratio,
    )
    return SimRet(clients=clients, server=server, network=sim.data)
def collision_domain_saturated_network(
        num_clients, payload_size, ack_size, mac_header_size,
        phy_header_size, preamble, bitrate, difs, sifs, slot, cwmin, cwmax,
        queue_capacity=None, connection_radius=100,
        speed_of_light=SPEED_OF_LIGHT, sim_time_limit=1000,
        log_level=Logger.Level.INFO):
    """Run a `CollisionDomainSaturatedNetwork` simulation and collect stats.

    Saturated variant of :func:`collision_domain_network` — clients always
    have a frame to send, so there is no ``source_interval`` parameter and
    no queue-drop/queue-wait statistics in the ``Client`` record.

    Returns a ``SimRet(clients, server, network)`` namedtuple.
    """
    sim = simulate(
        CollisionDomainSaturatedNetwork,
        stime_limit=sim_time_limit,
        params={
            'num_stations': (num_clients + 1),  # clients plus one server
            'payload_size': payload_size,
            'mac_header_size': mac_header_size,
            'phy_header_size': phy_header_size,
            'ack_size': ack_size,
            'preamble': preamble,
            'bitrate': bitrate,
            'difs': difs,
            'sifs': sifs,
            'slot': slot,
            'cwmin': cwmin,
            'cwmax': cwmax,
            'connection_radius': connection_radius,
            'speed_of_light': speed_of_light,
            'queue_capacity': queue_capacity,
        },
        loglevel=log_level,
    )
    SimRet = namedtuple('SimRet', ['clients', 'server', 'network'])
    Client = namedtuple('Client', [
        'service_time', 'num_retries', 'queue_size', 'busy',
        'source_intervals', 'num_packets_sent',
    ])
    Server = namedtuple('Server', [
        'arrival_intervals', 'num_rx_collided', 'num_rx_success',
        'num_packets_received', 'collision_ratio',
    ])
    # Per-client statistics come from each client's first (only) interface.
    clients = []
    for node in sim.data.clients:
        iface = node.interfaces[0]
        clients.append(Client(
            service_time=iface.transmitter.service_time,
            num_retries=iface.transmitter.num_retries_vector,
            queue_size=iface.queue.size_trace,
            busy=iface.transmitter.busy_trace,
            source_intervals=node.source.arrival_intervals.statistic(),
            num_packets_sent=iface.transmitter.num_sent,
        ))
    server_node = sim.data.server
    server_iface = server_node.interfaces[0]
    server = Server(
        arrival_intervals=server_node.sink.arrival_intervals.statistic(),
        num_rx_collided=server_iface.receiver.num_collisions,
        num_rx_success=server_iface.receiver.num_received,
        num_packets_received=server_node.sink.num_packets_received,
        collision_ratio=server_iface.receiver.collision_ratio,
    )
    return SimRet(clients=clients, server=server, network=sim.data)
def wireless_half_duplex_line_network(
        num_clients, payload_size, source_interval, ack_size, mac_header_size,
        phy_header_size, preamble, bitrate, difs, sifs, slot, cwmin, cwmax,
        queue_capacity=None, active_sources=(0,), connection_radius=120,
        distance=100, speed_of_light=SPEED_OF_LIGHT, sim_time_limit=1000,
        log_level=Logger.Level.INFO):
    """Run a `WirelessHalfDuplexLineNetwork` simulation and collect stats.

    Stations are arranged in a line ``distance`` apart; only the stations
    listed in ``active_sources`` generate traffic, so per-client source
    statistics (``source_intervals``, ``delay``, ``sid``) are ``None`` for
    stations without a source.

    Returns a ``SimRet(clients, server, network)`` namedtuple.
    """
    sim = simulate(
        WirelessHalfDuplexLineNetwork,
        stime_limit=sim_time_limit,
        params={
            'num_stations': (num_clients + 1),  # clients plus one server
            'active_sources': active_sources,
            'payload_size': payload_size,
            'source_interval': source_interval,
            'mac_header_size': mac_header_size,
            'phy_header_size': phy_header_size,
            'ack_size': ack_size,
            'preamble': preamble,
            'bitrate': bitrate,
            'difs': difs,
            'sifs': sifs,
            'slot': slot,
            'cwmin': cwmin,
            'cwmax': cwmax,
            'connection_radius': connection_radius,
            'distance': distance,
            'speed_of_light': speed_of_light,
            'queue_capacity': queue_capacity,
        },
        loglevel=log_level,
    )
    SimRet = namedtuple('SimRet', ['clients', 'server', 'network'])
    Client = namedtuple('Client', [
        'service_time', 'num_retries', 'queue_size', 'tx_busy', 'rx_busy',
        'source_intervals', 'num_packets_sent', 'delay', 'sid',
        'arrival_intervals', 'queue_drop_ratio', 'collision_ratio',
        'queue_wait',
    ])
    Server = namedtuple('Server', [
        'arrival_intervals', 'num_rx_collided', 'num_rx_success',
        'num_packets_received', 'collision_ratio',
    ])
    server_node = sim.data.server
    clients = []
    for node in sim.data.clients:
        src = node.source
        iface = node.interfaces[0]
        clients.append(Client(
            service_time=iface.transmitter.service_time,
            num_retries=iface.transmitter.num_retries_vector,
            queue_size=iface.queue.size_trace,
            tx_busy=iface.transmitter.busy_trace,
            rx_busy=iface.receiver.busy_trace,
            source_intervals=(
                src.arrival_intervals.statistic() if src else None),
            num_packets_sent=iface.transmitter.num_sent,
            delay=(server_node.sink.source_delays.get(src.source_id)
                   if src else None),
            sid=(src.source_id if src else None),
            arrival_intervals=iface.queue.arrival_intervals.statistic(),
            queue_drop_ratio=iface.queue.drop_ratio,
            collision_ratio=iface.receiver.collision_ratio,
            queue_wait=iface.queue.wait_intervals,
        ))
    server_iface = server_node.interfaces[0]
    server = Server(
        arrival_intervals=server_node.sink.arrival_intervals.statistic(),
        num_rx_collided=server_iface.receiver.num_collisions,
        num_rx_success=server_iface.receiver.num_received,
        num_packets_received=server_node.sink.num_packets_received,
        collision_ratio=server_iface.receiver.collision_ratio,
    )
    return SimRet(clients=clients, server=server, network=sim.data)
def wired_line_network(
        num_clients, payload_size, source_interval, header_size, bitrate,
        preamble=0, ifs=None, distance=100, queue_capacity=None,
        active_sources=(0,), speed_of_light=SPEED_OF_LIGHT,
        sim_time_limit=1000, log_level=Logger.Level.INFO):
    """Run a `WiredLineNetwork` simulation and collect its statistics.

    When ``ifs`` (inter-frame space) is not given it defaults to one bit
    duration, ``1 / bitrate``.  Each client exposes two interfaces; the
    first carries inbound traffic and the last carries outbound traffic.
    Per-client source statistics are ``None`` for stations that are not in
    ``active_sources``.

    Returns a ``SimRet(clients, server, network)`` namedtuple.
    """
    if ifs is None:
        ifs = 1 / bitrate
    sim = simulate(
        WiredLineNetwork,
        stime_limit=sim_time_limit,
        params={
            'num_stations': (num_clients + 1),  # clients plus one server
            'payload_size': payload_size,
            'source_interval': source_interval,
            'header_size': header_size,
            'bitrate': bitrate,
            'distance': distance,
            'speed_of_light': speed_of_light,
            'active_sources': active_sources,
            'preamble': preamble,
            'ifs': ifs,
            'queue_capacity': queue_capacity,
        },
        loglevel=log_level,
    )
    SimRet = namedtuple('SimRet', ['clients', 'server', 'network'])
    Client = namedtuple('Client', [
        'service_time', 'queue_size', 'tx_busy', 'rx_busy',
        'source_intervals', 'num_packets_sent', 'delay', 'sid',
        'arrival_intervals', 'queue_drop_ratio', 'queue_wait',
    ])
    Server = namedtuple('Server', [
        'arrival_intervals', 'num_packets_received',
    ])
    server_node = sim.data.server
    clients = []
    for node in sim.data.clients:
        src = node.source
        inp_if = node.interfaces[0]   # inbound (towards this station)
        out_if = node.interfaces[-1]  # outbound (towards the server)
        clients.append(Client(
            service_time=out_if.transceiver.service_time,
            queue_size=out_if.queue.size_trace,
            tx_busy=out_if.transceiver.tx_busy_trace,
            rx_busy=inp_if.transceiver.rx_busy_trace,
            source_intervals=(
                src.arrival_intervals.statistic() if src else None),
            num_packets_sent=out_if.transceiver.num_transmitted_packets,
            delay=(server_node.sink.source_delays.get(src.source_id)
                   if src else None),
            sid=(src.source_id if src else None),
            arrival_intervals=out_if.queue.arrival_intervals.statistic(),
            queue_drop_ratio=out_if.queue.drop_ratio,
            queue_wait=out_if.queue.wait_intervals,
        ))
    server = Server(
        arrival_intervals=server_node.sink.arrival_intervals.statistic(),
        num_packets_received=server_node.sink.num_packets_received,
    )
    return SimRet(clients=clients, server=server, network=sim.data)
| 39.088968
| 80
| 0.659232
| 1,267
| 10,984
| 5.36543
| 0.09629
| 0.037217
| 0.030009
| 0.025154
| 0.85481
| 0.82083
| 0.808914
| 0.795969
| 0.774493
| 0.774493
| 0
| 0.008799
| 0.24472
| 10,984
| 280
| 81
| 39.228571
| 0.810632
| 0.004643
| 0
| 0.702811
| 0
| 0
| 0.078683
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016064
| false
| 0
| 0.016064
| 0
| 0.048193
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
008b7d1ac38d4c33aee160fdec748050fb593bdb
| 60,548
|
py
|
Python
|
odor_tracking_sim/swarm_models.py
|
annierak/odor_tracking_sim
|
4600a7be942666c3a5a0f366dab6d14838f332a0
|
[
"MIT"
] | null | null | null |
odor_tracking_sim/swarm_models.py
|
annierak/odor_tracking_sim
|
4600a7be942666c3a5a0f366dab6d14838f332a0
|
[
"MIT"
] | null | null | null |
odor_tracking_sim/swarm_models.py
|
annierak/odor_tracking_sim
|
4600a7be942666c3a5a0f366dab6d14838f332a0
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import scipy
import scipy.stats
import numpy as np
import matplotlib.pyplot as plt
import time
from odor_models import FakeDiffusionOdorField
from pompy import models
from utility import unit_vector
from utility import rotate_vecs
from utility import distance
from utility import par_perp
from utility import fit_von_mises
from utility import cartesian_to_polar
from utility import speed_sigmoid_func
import utils_find_1st as utf1st
class BasicSwarmOfFlies(object):
"""
New vectorized (faster) fly model.
"""
DefaultSize = 500
DefaultParam = {
'dt' : 0.25,
'initial_heading_dist': scipy.stats.uniform(0,2*scipy.pi), #continuous_distribution object
'initial_heading' : scipy.radians(scipy.random.uniform(0.0,360.0,(DefaultSize,))),
'x_start_position' : scipy.zeros((DefaultSize,)),
'y_start_position' : scipy.zeros((DefaultSize,)),
'surging_error_dist' : scipy.stats.laplace(loc=0.,scale=1.),
'surging_error_std' : scipy.radians(5.),
'flight_speed' : scipy.full((DefaultSize,), 0.7),
'release_time' : scipy.full((DefaultSize,), 0.0),
'release_time_constant': None,
'cast_interval' : [1.0, 10.0],
'wind_slippage' : (0.0,0.0), #(// to fly's path, perp to fly's path)
'odor_thresholds' : {
'lower': 0.002,
'upper': 0.004
},
'cast_timeout':20,
'odor_probabilities' : {
'lower': 0.9, # detection probability/sec of exposure
'upper': 0.002, # detection probability/sec of exposure
},
'schmitt_trigger' : True,
'low_pass_filter_length':3,
'reset_distribution': scipy.stats.uniform(0,2*scipy.pi),
'pure_advection': False,
'airspeed_saturation': False
}
#If the fly is doing pure_advection, it is carried by the wind whenever it
#is in startmode. This is implemented by assigning wind_slippage to 0,0),
#AND ignoring the x, y velocities when updating position when in startmode,
#instead just updating position using the wind velocities. (see .update_positions)
Mode_StartMode = 0
Mode_FlyUpWind = 1
Mode_CastForOdor = 2
Mode_Trapped = 3
    def __init__(self,wind_field,traps,param={},start_type='fh',
            track_plume_bouts=False, #track each fly's plume interaction history
            track_arena_exits=False #track each fly leaving the grid
            ):
        """Initialize the fly swarm state vectors.

        Parameters:
            wind_field: object with a ``value(t, x, y)`` method (used here via
                ``get_par_perp_comps``) giving wind at each fly position.
            traps: trap layout object; only ``traps.num_traps`` is read here.
            param: dict of overrides merged on top of ``DefaultParam``.
                NOTE(review): the mutable default ``{}`` is harmless here
                because ``param`` is only read, never mutated.
                NOTE(review): keys ``'dt_plot'``, ``'t_stop'``, ``'heading_data'``
                and ``'swarm_size'`` are read below but are absent from the
                visible ``DefaultParam`` — callers apparently must supply them;
                confirm against call sites.
            start_type: 'fh' (fixed heading) or 'rw' (random walk).
            track_plume_bouts: if True, allocate per-fly plume-bout bookkeeping.
            track_arena_exits: if True, track which flies stay inside the grid.
        """
        #default start type is fixed heading
        '''Basic parameters'''
        self.param = dict(self.DefaultParam)
        self.param.update(param)
        self.dt = self.param['dt']
        self.dt_plot=self.param['dt_plot']
        self.t_stop = self.param['t_stop']
        self.x_position = np.copy(self.param['x_start_position'])
        self.y_position = np.copy(self.param['y_start_position'])
        self.distance_to_origin = scipy.zeros((self.size))
        self.num_traps = traps.num_traps
        '''Open space parameters and variables'''
        # heading_data, when provided, replaces any mu/kappa heading info with a
        # von Mises distribution fit to the observed headings.
        if(not(self.param['heading_data']==None)):
            ##If heading data field is provided to init, any mu/kappa information will be OVERRIDDEN
            (mean,kappa) = fit_von_mises(self.param['heading_data'])
            self.param['initial_heading_dist'] = scipy.stats.vonmises(loc=mean,kappa=kappa)
            self.param['initial_heading'] = scipy.random.vonmises(mean,kappa,(self.param['swarm_size'],))
        self.check_param()
        # Initial velocity: unit heading scaled by per-fly flight speed.
        self.x_velocity = self.param['flight_speed']*scipy.cos(self.param['initial_heading'])
        self.y_velocity = self.param['flight_speed']*scipy.sin(self.param['initial_heading'])
        self.mode = scipy.full((self.size,), self.Mode_StartMode, dtype=int)
        # Pure advection: wind carries the fly, so slippage coefficients are zeroed
        # (position updates then use the wind velocity directly; see update_positions).
        if self.param['pure_advection']:
            self.param['wind_slippage'] = (0,0)
        self.parallel_coeff,self.perp_coeff = self.param['wind_slippage']
        self.par_wind,self.perp_wind = self.get_par_perp_comps(0.,wind_field,
            scipy.full((self.size,),True,dtype=bool))
        #^This is the set of 2 x time arrays of the components of each fly's velocity par/perp to wind
        self.start_type = start_type #Either 'fh' (fixed heading) or 'rw' (random walk)
        if start_type=='rw':
            self.rw_dist = scipy.stats.lognorm(0.25,scale=1)
        else:
            self.rw_dist = None
        '''Arena tracking variables'''
        self.track_arena_exits=track_arena_exits
        if self.track_arena_exits:
            self.still_in_arena = scipy.full(scipy.shape(self.x_position),True,dtype=bool)
        '''Plume tracking related parameters and variables'''
        self.lp_filter_duration = int(self.param['low_pass_filter_length']/self.dt) # in multiples of dt
        self.track_plume_bouts = track_plume_bouts
        self.surging_error = scipy.zeros((self.size,))
        self.t_last_cast = scipy.zeros((self.size,))
        #Parameters relating to cast timeout.
        self.time_began_casting = scipy.full(self.size,scipy.inf)
        self.reset_distribution = self.param['reset_distribution']
        # Pre-drawn pool of reset headings, consumed via reset_pool_counter.
        self.reset_pool = self.reset_distribution.rvs(2000)
        self.cast_timeout = self.param['cast_timeout']
        self.reset_pool_counter = 0
        #for the case of the low pass filter, a vector that tracks time
        #since plume update_for_odor_loss
        if not(self.param['schmitt_trigger']):
            self.surging_plumeless_count = scipy.zeros((self.size))
        #for the case of plume bout tracking (time spent in plume tracking),
        #a vector that tracks how long each fly has been in the plume if it's
        #in the plume currently
        if self.track_plume_bouts:
            self.timesteps_since_plume_entry = scipy.full(self.size,scipy.nan)
            #also, a matrix that tracks plume bout lengths for each fly,
            #estimated rows is 100
            self.plume_bout_lengths = scipy.zeros((100,self.size))
            self.plume_bout_lengths_row = 0
        self.increments_until_turn = scipy.ones((self.size,)) #This is for the Levy walk option.
        cast_interval = self.param['cast_interval']
        self.dt_next_cast = scipy.random.uniform(cast_interval[0], cast_interval[1], (self.size,))
        self.cast_sign = scipy.random.choice([-1,1],(self.size,))
        self.ever_tracked = scipy.full((self.size,), False, dtype=bool) #Bool that keeps track if the fly ever plume tracked (false=never tracked)
        '''Trapped fly parameters and variables'''
        self.trap_num = scipy.full((self.size,),-1, dtype=int)
        self.in_trap = scipy.full((self.size,), False, dtype=bool)
        self.x_trap_loc = scipy.zeros((self.size,))
        self.y_trap_loc = scipy.zeros((self.size,))
        self.t_in_trap = scipy.full((self.size,),scipy.inf)
        self.angle_in_trap = scipy.full(self.size,scipy.inf)
def check_param(self):
"""
Check parameters - mostly just that shape of ndarrays match
"""
if scipy.ndim(self.param['initial_heading'].shape) > 1:
raise(ValueError, 'initial_heading must have ndim=1')
equal_shape_list = ['x_start_position','y_start_position','flight_speed','release_time']
for item in equal_shape_list:
if self.param[item].shape != self.param['initial_heading'].shape:
print(item)
print(self.param[item].shape,self.param['initial_heading'].shape)
raise(ValueError, '{0}.shape must equal initial_heading.shape'.format(item))
@property
def size(self):
return self.param['initial_heading'].shape[0]
    def update(self, t, dt, wind_field, odor_field,traps,plumes=None,
            xlim=None,ylim=None,pre_stored=False):
        """
        Update fly swarm one time step.

        Sequence: (1) build boolean masks per behavioral mode, (2) sample odor
        and wind at each fly position, (3) run the per-mode sub-updates (trap,
        changing wind, odor detection, odor loss, cast reset), (4) integrate
        positions and apply wind slippage.

        NOTE(review): the mask_* arrays are recomputed wholesale after each
        sub-update because the sub-updates mutate self.mode in place.
        NOTE(review): xlim/ylim are only read when track_arena_exits is set —
        they must be non-None in that case.
        """
        ''' (0) Grab current time to track the updating time'''
        last = time.time()
        ''' (1) Categorize flies for update type and report category counts'''
        mask_release = t > self.param['release_time']
        mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
        mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
        mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
        self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
        mask_trapped = self.mode == self.Mode_Trapped
        mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
        # print(str(sum(mask_castfor))+' flies are casting')
        # print(str(sum(mask_flyupwd))+' flies are surging')
        #Keep track of which flies have never tracked
        print('time categorizing flies: '+str(time.time()-last))
        last = time.time()
        ''' (2) Get odor and wind info for each fly that is flying '''
        ''' (a) odor info'''
        # Odor is sampled only for released, untrapped flies; others stay nan.
        odor = scipy.full(self.size,scipy.nan)
        mask_odor_relevant = mask_release & (~mask_trapped)
        if plumes is not None:
            puff_array = plumes.puffs
        # Dispatch on the odor-field type: time-dependent fields take t,
        # the analytic plume models take positions only, and the pompy-style
        # fallback computes concentration from the puff array.
        if isinstance(odor_field,FakeDiffusionOdorField):
            odor[mask_odor_relevant] = odor_field.value(
                t,self.x_position[mask_odor_relevant],self.y_position[mask_odor_relevant])
        elif isinstance(odor_field,models.SuttonModelPlume) or \
                isinstance(odor_field,models.GaussianFitPlume) or \
                isinstance(odor_field,models.OnlinePlume) or \
                isinstance(odor_field,models.AdjustedGaussianFitPlume) or \
                isinstance(odor_field,models.LogisticProbPlume):
            odor[mask_odor_relevant] = odor_field.value(
                self.x_position[mask_odor_relevant],self.y_position[mask_odor_relevant])
        elif pre_stored:
            odor[mask_odor_relevant] = odor_field.value(
                t,self.x_position[mask_odor_relevant],self.y_position[mask_odor_relevant])
        else:
            # NOTE(review): puff_array is only bound when plumes is not None;
            # this branch presumably requires plumes — confirm against callers.
            odor[mask_odor_relevant]= odor_field.calc_conc_list(
                puff_array, self.x_position[mask_odor_relevant],\
                self.y_position[mask_odor_relevant], z=0)
        print('time obtaining odor info: '+str(time.time()-last))
        ''' (b) wind info'''
        last = time.time()
        x_wind, y_wind = wind_field.value(t,self.x_position, self.y_position)
        x_wind_unit, y_wind_unit = unit_vector(x_wind, y_wind)
        wind_uvecs = {'x': x_wind_unit,'y': y_wind_unit}
        print('time obtaining wind info: '+str(time.time()-last))
        last = time.time()
        '''(3) Update the fly velocities according to mode and open space behavior paradigm'''
        # Update state for flies in traps
        self.update_for_in_trap(t, traps)
        #----Update the masks-----
        mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
        mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
        mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
        self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
        mask_trapped = self.mode == self.Mode_Trapped
        mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
        # Odor-driven behavior is skipped entirely for random-walk start types.
        if not(self.start_type=='cvrw' or self.start_type=='rw'):
            #The random walk mode excludes odor detection
            masks = {'startmode': mask_startmode, 'flyupwd': mask_flyupwd,
                'castfor': mask_castfor}
            #Before anything else, adjust the velocities of flies already in
            #cast or surge mode based on the new wind direction this time step
            self.update_for_changing_wind(masks,wind_uvecs)
            #----Update the masks-----
            mask_release = t > self.param['release_time']
            mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
            mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
            mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
            self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
            mask_trapped = self.mode == self.Mode_Trapped
            mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
            # Update state for flies detecting odor plumes
            self.update_for_odor_detection(dt, odor,odor_field, wind_uvecs, masks)
            #----Update the masks-----
            mask_release = t > self.param['release_time']
            mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
            mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
            mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
            self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
            mask_trapped = self.mode == self.Mode_Trapped
            mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
            print('time updating for odor detection: '+str(time.time()-last))
            last = time.time()
            # Update state for files losing odor plume or already casting.
            self.update_for_odor_loss(t, dt, odor, odor_field, wind_uvecs, masks)
            #----Update the masks-----
            mask_release = t > self.param['release_time']
            mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
            mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
            mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
            self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
            mask_trapped = self.mode == self.Mode_Trapped
            mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
            print('time updating for odor loss: '+str(time.time()-last))
            last = time.time()
            #Update state for flies giving up on casting.
            self.reset_pool_counter=self.update_for_reset(t,masks,self.reset_pool_counter)
            #----Update the masks-----
            mask_release = t > self.param['release_time']
            mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
            mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
            mask_castfor = mask_release & (self.mode == self.Mode_CastForOdor)
            self.ever_tracked = self.ever_tracked | (mask_release & (self.mode == self.Mode_FlyUpWind)) #this is true if the fly has previously tracked or has been released and is now in upwind mode
            mask_trapped = self.mode == self.Mode_Trapped
            mask_reset_startmode = self.ever_tracked & (self.mode == self.Mode_StartMode)
            # At this point, add one timestep to all entries of
            # timesteps_since_plume_entry that are not nan
            if self.track_plume_bouts:
                add_inds = scipy.logical_not(scipy.isnan(self.timesteps_since_plume_entry))
                self.timesteps_since_plume_entry[add_inds]+=1
            # print('timesteps_since_plume_entry:'+str(self.timesteps_since_plume_entry))
        # Update position based on mode and current velocities
        ''' (4) Update the positions with the new velocities'''
        ''' (a) Update the positions with general direction excluding par/perp slip component'''
        self.update_positions(mask_release,mask_trapped,mask_startmode,x_wind,y_wind,dt)
        #check for flies that have left the arena
        if self.track_arena_exits:
            inside_x_bounds = (xlim[0]<=self.x_position) & (self.x_position<=xlim[1])
            inside_y_bounds = (ylim[0]<=self.y_position) & (self.y_position<=ylim[1])
            inside_bounds = inside_x_bounds & inside_y_bounds
            self.still_in_arena = self.still_in_arena & inside_bounds
            print(sum(self.still_in_arena))
        #Michael's idea 2/27/18: apply wind slippage according to c_1*(component
        #parallel to fly's velocity) + c2*(component pe rp to fly's velocity)
        ''' (b) Update the positions with the perp slip component'''
        self.update_par_perp_comps(t,wind_field,mask_release,mask_startmode,
            mask_trapped, mask_reset_startmode)
        #par/perp comps for flies not {released and in fly mode} are set to 0.
        c1 = self.parallel_coeff
        c2 = self.perp_coeff
        self.x_position[mask_startmode] += dt*(c1*self.par_wind[0,mask_startmode]+c2*self.perp_wind[0,mask_startmode])
        self.y_position[mask_startmode] += dt*(c1*self.par_wind[1,mask_startmode]+c2*self.perp_wind[1,mask_startmode])
def update_for_changing_wind(self,masks,wind_uvecs):
'''This function updates the velocities of flies in cast or surge mode
with the current wind info '''
x_wind_unit = wind_uvecs['x']
y_wind_unit = wind_uvecs['y']
mask_flyupwd = masks['flyupwd']
mask_castfor = masks['castfor']
mask_change = (mask_flyupwd) | (mask_castfor)
#Select new heading errors for the casting and surging flies
surging_error_std = self.param['surging_error_std']
distf = self.param['surging_error_dist'] #this variable is a pdf
self.surging_error[mask_change] = surging_error_std*distf.rvs(size=mask_change.sum())
# Set x and y velocities for the surging flies according to current wind
x_unit_change, y_unit_change = rotate_vecs(
x_wind_unit[mask_flyupwd],
y_wind_unit[mask_flyupwd],
self.surging_error[mask_flyupwd]
)
speed = self.param['flight_speed'][mask_flyupwd]
self.x_velocity[mask_flyupwd] = -speed*x_unit_change
self.y_velocity[mask_flyupwd] = -speed*y_unit_change
# Set x and y velocities for the casting flies according to current wind,
#keeping in mind their pre-determined cast signs (left/right)
x_unit_change, y_unit_change = rotate_vecs(
y_wind_unit[mask_castfor],
-x_wind_unit[mask_castfor],
self.surging_error[mask_castfor]
)
speed = self.param['flight_speed'][mask_castfor]
#Actually update velocities
self.x_velocity[mask_castfor] = self.cast_sign[mask_castfor]*speed*x_unit_change
self.y_velocity[mask_castfor] = self.cast_sign[mask_castfor]*speed*y_unit_change
    def update_for_odor_detection(self, dt, odor, odor_field, wind_uvecs, masks):
        """
        Update simulation for odor detection
        * Find flies in StartMode and CastForOdor modes where the odor value >= upper threshold.
        * Test if they detect odor (roll dice and compare with detection probabilty).
        * If they do detect odor change their mode to FlyUpWind.
        * set x and y velocities to upwind at speed
        """
        x_wind_unit = wind_uvecs['x']
        y_wind_unit = wind_uvecs['y']
        mask_startmode = masks['startmode']
        mask_castfor = masks['castfor']
        if self.param['schmitt_trigger']:
            #Case where mask_change (to surging) is determined by Schmitt trigger
            mask_gt_upper = odor >= self.param['odor_thresholds']['upper']
            mask_candidates = mask_gt_upper & (mask_startmode | mask_castfor)
            # inf for non-candidates guarantees they fail the probability test.
            dice_roll = scipy.full((self.size,),scipy.inf)
            dice_roll[mask_candidates] = scipy.rand(mask_candidates.sum())
            # Convert probabilty/sec to probabilty for time step interval dt
            odor_probability_upper = 1.0 - (1.0 - self.param['odor_probabilities']['upper'])**dt
            mask_change = dice_roll < odor_probability_upper
        else:
            #Case where mask_change (to surging) is determined by low-pass filter
            if not(isinstance(odor_field,models.SuttonModelPlume) or \
                    isinstance(odor_field,models.GaussianFitPlume)):
                #This is where the odor is deterministically accessed, as for pompy plumes
                mask_gt_upper = odor >= self.param['odor_thresholds']['upper']
            else:
                #For the GaussianFitPlume, the odor value is interpreted as
                #the probability the odor is greater than the threshold
                # NOTE(review): odor contains nan for unreleased/trapped flies;
                # binomial() with nan p would presumably fail — confirm callers
                # only reach this branch with finite odor values.
                mask_gt_upper = np.random.binomial(1,odor,size=np.shape(odor)).astype(bool)
            mask_change = mask_gt_upper & (mask_startmode | mask_castfor)
        #In both cases the mask_change flies are assigned to Mode_FlyUpWind
        self.mode[mask_change] = self.Mode_FlyUpWind
        if self.track_plume_bouts:
            #These flies have their entries in timesteps_since_plume_entry changed from nan to 0
            self.timesteps_since_plume_entry[mask_change] = 0
        # Compute new heading error for flies which change mode according to Laplace dist (Floris paper)
        surging_error_std = self.param['surging_error_std']
        distf = self.param['surging_error_dist'] #this variable is a pdf
        self.surging_error[mask_change] = surging_error_std*distf.rvs(size=mask_change.sum())
        # Set x and y velocities for the flies which just changed to FlyUpWind.
        '''This is the insertion of heading error for surging flies'''
        x_unit_change, y_unit_change = rotate_vecs(
            x_wind_unit[mask_change],
            y_wind_unit[mask_change],
            self.surging_error[mask_change]
        )
        speed = self.param['flight_speed'][mask_change]
        # Negated: upwind is opposite the wind unit vector.
        self.x_velocity[mask_change] = -speed*x_unit_change
        self.y_velocity[mask_change] = -speed*y_unit_change
def update_for_odor_loss(self, t, dt, odor, odor_field, wind_uvecs, masks):
"""
Update simulation for flies which lose odor or have lost odor and are
casting.
* Find flies in FlyUpWind mode where the odor value <= lower threshold.
* Test if they lose odor (roll dice and compare with probabilty).
* If they lose odor change mode to CastForOdor.
* Update velocties for flies in CastForOdor mode.
"""
x_wind_unit = wind_uvecs['x']
y_wind_unit = wind_uvecs['y']
mask_flyupwd = masks['flyupwd']
mask_castfor = masks['castfor']
if self.param['schmitt_trigger']:
mask_lt_lower = odor <= self.param['odor_thresholds']['lower']
mask_candidates = mask_lt_lower & mask_flyupwd
dice_roll = scipy.full((self.size,),scipy.inf)
dice_roll[mask_candidates] = scipy.rand(mask_candidates.sum())
# Convert probabilty/sec to probabilty for time step interval dt
odor_probability_lower = 1.0 - (1.0 - self.param['odor_probabilities']['lower'])**dt
mask_change = dice_roll < odor_probability_lower
else:
if not(isinstance(odor_field,models.SuttonModelPlume) or \
isinstance(odor_field,models.GaussianFitPlume)):
#Find the indices of the flies below the (single) threshold
mask_blw_thres = odor <= self.param['odor_thresholds']['upper']
else:
#For the GaussianFitPlume, the odor value is interpreted as
#the probability the odor is greater than the threshold--
#so it's under the threshold with probability (1 - odor value)
mask_blw_thres = np.random.binomial(1,1.-odor,size=np.shape(odor)).astype(bool)
#Filter by the flies who are surging
mask_candidates = mask_blw_thres & mask_flyupwd
#If the fly's counter is at 0, or at 1, add 1 to the counter and do nothing (preserve mode)
mask_categ1 = mask_candidates & (self.surging_plumeless_count<self.lp_filter_duration)
self.surging_plumeless_count[mask_categ1] +=1
self.mode[mask_categ1] = self.Mode_FlyUpWind
#If the fly's counter is at 2, assign to casting mode, and reset counter to 0
mask_change = mask_candidates & (self.surging_plumeless_count==self.lp_filter_duration)
self.surging_plumeless_count[mask_change] =0
#In both cases set mask_change to cast for odor mode
self.mode[mask_change] = self.Mode_CastForOdor
#Grab the time these flies starting casting.
self.time_began_casting[mask_change] = t
#Then drop these flies' plume bout durations into plume_bout_lengths
if self.track_plume_bouts & sum(mask_change)>0:
#(a) grab the saved index j of the first empty row in plume_bout_lengths
j = self.plume_bout_lengths_row
if j>scipy.shape(self.plume_bout_lengths)[0]-1:
self.plume_bout_lengths = scipy.append(
self.plume_bout_lengths,scipy.zeros((100,self.size)),axis=0)
print('appending')
#(b) fill plume_bout_lengths row j
self.plume_bout_lengths[j,mask_change] = self.timesteps_since_plume_entry[mask_change]
#move up row counter
self.plume_bout_lengths_row+=1
#Reassign these indices of timesteps_since_plume_entry to nan
self.timesteps_since_plume_entry[mask_change]=scipy.nan
# Lump together flies changing to CastForOdor mode with casting flies which are
# changing direction (e.g. time to make cast direction change)
mask_change |= mask_castfor & (t > (self.t_last_cast + self.dt_next_cast))
# Compute new heading errors for flies which change mode (to casting)
self.surging_error[mask_change] = self.param['surging_error_std']*scipy.randn(mask_change.sum())
# Set new cast intervals and directions for flies changing to CastForOdor or starting a new cast
cast_interval = self.param['cast_interval']
self.dt_next_cast[mask_change] = scipy.random.uniform(
cast_interval[0],
cast_interval[1],
(mask_change.sum(),)
)
self.t_last_cast[mask_change] = t
self.cast_sign[mask_change] = scipy.random.choice([-1,1],(mask_change.sum(),))
'''This is the insertion of heading error for casting flies'''
# Set x and y velocities for new CastForOdor flies
x_unit_change, y_unit_change = rotate_vecs(
y_wind_unit[mask_change],
-x_wind_unit[mask_change],
self.surging_error[mask_change]
)
speed = self.param['flight_speed'][mask_change]
self.x_velocity[mask_change] = self.cast_sign[mask_change]*speed*x_unit_change
self.y_velocity[mask_change] = self.cast_sign[mask_change]*speed*y_unit_change
def update_for_reset(self,t,masks,reset_pool_counter):
'''This is for the flies who give up on casting'''
mask_castfor = masks['castfor']
reset_mask = mask_castfor & ((t-self.time_began_casting)> self.cast_timeout)
self.mode[reset_mask] = self.Mode_StartMode
for index in range(len(reset_mask)):
if reset_mask[index]:
try:
angle = self.reset_pool[reset_pool_counter]
except(IndexError):
self.reset_pool = scipy.append(
self.reset_pool,self.reset_distribution.rvs(2000))
angle = self.reset_pool[reset_pool_counter]
self.x_velocity[index] = self.param['flight_speed'][0]*scipy.cos(angle)
self.y_velocity[index] = self.param['flight_speed'][0]*scipy.sin(angle)
reset_pool_counter +=1
return reset_pool_counter
def update_for_in_trap(self, t, traps): #******
"""
Update simulation for flies in traps.
* If flies are in traps. If so record trap info and time.
"""
sources = traps.param['source_locations'] #Of format [(0,0),]
for trap_num, trap_loc in enumerate(sources):
dist_vals = distance((self.x_position, self.y_position),trap_loc)
mask_trapped = dist_vals < traps.param['trap_radius']
self.mode[mask_trapped] = self.Mode_Trapped
self.trap_num[mask_trapped] = trap_num
self.x_trap_loc[mask_trapped] = trap_loc[0]
self.y_trap_loc[mask_trapped] = trap_loc[1]
# Get time stamp for newly trapped flies
mask_newly_trapped = mask_trapped & (self.t_in_trap == scipy.inf)
self.t_in_trap[mask_newly_trapped] = t
#Get arrival angle for newly trapped flies
vfunc = scipy.vectorize(cartesian_to_polar)
xvels,yvels = self.x_velocity[mask_newly_trapped],self.y_velocity[mask_newly_trapped]
if scipy.size(xvels)>0:
_,thetas = vfunc(xvels,yvels)
thetas = (thetas+scipy.pi)%(2*scipy.pi)
self.angle_in_trap[mask_newly_trapped] = thetas
#Stop the flies trapped
self.x_velocity[mask_trapped] = 0.0
self.y_velocity[mask_trapped] = 0.0
def get_time_trapped(self,trap_num=None,straight_shots=False):
#adjusted this function to isolate flies that went straight to traps
mask_trapped = self.mode == self.Mode_Trapped
if straight_shots:
mask_trapped = mask_trapped & scipy.logical_not(self.ever_tracked)
if trap_num is None:
return self.t_in_trap[mask_trapped]
else:
mask_trapped_in_num = mask_trapped & (self.trap_num == trap_num)
return self.t_in_trap[mask_trapped_in_num]
def get_angle_trapped(self,trap_num,time_window):
mask_trapped = self.mode == self.Mode_Trapped
mask_trapped_in_num = mask_trapped & (self.trap_num == trap_num)
if not(time_window==[]):
#This case returns angle trapped of those trapped in a given time window
time_bool = (self.t_in_trap>=time_window[0]) & (self.t_in_trap<=time_window[1])
mask_trapped_in_num = mask_trapped_in_num & time_bool
return self.angle_in_trap[mask_trapped_in_num]
def get_trap_nums(self):
mask_trap_num_set = self.trap_num != -1
trap_num_array = scipy.unique(self.trap_num[mask_trap_num_set])
trap_num_array.sort()
return list(trap_num_array)
def list_all_traps(self):
return(range(self.num_traps))
def get_trap_counts(self):
mask_trap_num_set = self.trap_num != -1
(trap_num_array,trap_counts)=scipy.unique(
self.trap_num[mask_trap_num_set],return_counts = True)
all_trap_counts = scipy.zeros(self.num_traps)
all_trap_counts[trap_num_array] = trap_counts
return all_trap_counts
def update_positions(self,mask_release,mask_trapped,mask_startmode,x_wind,y_wind,dt):
if self.start_type=='fh' or sum(mask_startmode)<1.:
mask_move = mask_release & (~mask_trapped)
print('number moving: '+str(scipy.sum(mask_move)))
if self.param['pure_advection']:
self.x_position[mask_startmode] += dt*x_wind[mask_startmode]
self.y_position[mask_startmode] += dt*y_wind[mask_startmode]
self.x_position[mask_move& (~mask_startmode)] += dt*self.x_velocity[mask_move& (~mask_startmode)]
self.y_position[mask_move& (~mask_startmode)] += dt*self.y_velocity[mask_move& (~mask_startmode)]
elif self.param['airspeed_saturation']:
wind_par = self.par_wind[:,mask_move&mask_startmode]
wind_par_mags = np.sqrt(np.sum(wind_par*wind_par,axis=0))
wind_par_signs = np.sign(np.sum(wind_par*
(np.vstack((self.x_velocity[mask_move&mask_startmode],self.y_velocity[
mask_move&mask_startmode])))
,axis=0))
signed_wind_par_mags = wind_par_mags*wind_par_signs
flight_speed = self.param['flight_speed'][0]
# #Old version: linear/flat/linear function from intended speed to effective speed
# adjusted_mag = flight_speed*np.ones_like(signed_wind_par_mags)
# adjusted_mag[signed_wind_par_mags<-0.8] = signed_wind_par_mags[signed_wind_par_mags<-0.8]+2.4
# adjusted_mag[signed_wind_par_mags>4.] = signed_wind_par_mags[signed_wind_par_mags>4.]-2.4
# adjusted_mag[(signed_wind_par_mags<4.)&(signed_wind_par_mags>-0.8)] = 1.6
#
# plt.figure()
# plt.plot(signed_wind_par_mags,adjusted_mag,'o')
# New version: a sigmoidal smoothing of the above
adjusted_mag = speed_sigmoid_func(signed_wind_par_mags)
plt.plot(signed_wind_par_mags,adjusted_mag,'o')
adjusted_mag = adjusted_mag/flight_speed #normalization to unit mag
# plt.show()
# raw_input()
self.x_position[mask_move&mask_startmode] += dt*adjusted_mag*self.x_velocity[mask_move&mask_startmode]
self.y_position[mask_move&mask_startmode] += dt*adjusted_mag*self.y_velocity[mask_move&mask_startmode]
self.x_position[mask_move&(~mask_startmode)] += dt*self.x_velocity[mask_move&(~mask_startmode)]
self.y_position[mask_move&(~mask_startmode)] += dt*self.y_velocity[mask_move&(~mask_startmode)]
else:
self.x_position[mask_move] += dt*self.x_velocity[mask_move]
self.y_position[mask_move] += dt*self.y_velocity[mask_move]
elif self.start_type=='rw':
#The flies who are not in start_mode move the same way
mask_move = mask_release & (~mask_trapped) & (~mask_startmode)
self.x_position[mask_move] += dt*self.x_velocity[mask_move]
self.y_position[mask_move] += dt*self.y_velocity[mask_move]
'''Option 1: path lengths are chosen from a heavy-tailed distribution'''
'''For those in startmode, the x step and y step of each fly is chosen from lognormal (heavy-tailed) distribution
right now the distribution is manually set up so that moving faster than peak velocity (1.8 m/s) happens with close to
0 probability: the distribution is lognormal with sigma = 0.25 and mu = 0, and then *(1.8/2.0)*timestep'''
sigma = 0.25
mu = 0
scaling_factor = (1.8/2.0)*dt #So that 1.8 m/s is the fastest it ever flies
draws = sum(mask_startmode)
self.x_position[mask_startmode] += scaling_factor*scipy.random.choice([1,-1],
size=draws)*scipy.stats.lognorm.rvs(sigma,size=draws,scale=scipy.exp(mu))
self.y_position[mask_startmode] += scaling_factor*scipy.random.choice([1,-1],
size=draws)*scipy.stats.lognorm.rvs(sigma,size=draws,scale=scipy.exp(mu))
elif self.start_type=='cvrw':
start = time.time()
#All flies update position according to velocity, including start_mode flies
mask_move = mask_release & (~mask_trapped)
self.x_position[mask_move] += dt*self.x_velocity[mask_move]
self.y_position[mask_move] += dt*self.y_velocity[mask_move]
'''Option 2: Durations of a given direction are chosen from a heavy-tailed distribution
Draw from same kind of distribution as above, but round up for discrete time steps.
Distribution is lognormal with sigma = 0.5 and mu = 0, and then *(300/3.0)/timestep,
which makes the max occuring duration around 300 s= 5 min.'''
sigma = 0.5
mu = 0.
#Every startmode fly has one time step less left in current direction
self.increments_until_turn[mask_startmode&mask_move] -=1
#print(self.increments_until_turn[mask_startmode&mask_move])
#Flies whose time is up get assigned a new direction --> x and y velocity
mask_redraw = mask_move&mask_startmode & (self.increments_until_turn == 0)
cp = time.time()
draws = sum(mask_redraw)
if draws>0:
#directions = scipy.random.choice(self.uniform_directions_pool,draws)
sines, cosines = scipy.zeros(draws),scipy.zeros(draws)
for x in xrange(draws):
direction = scipy.stats.uniform.rvs(0.0,2*scipy.pi)
sines[x],cosines[x] = scipy.sin(direction),scipy.cos(direction)
#cp1 = time.time();print('cp1 :'+str(cp1-cp))
#self.x_velocity[mask_redraw]=self.param['flight_speed'][0]*scipy.cos(directions)
self.x_velocity[mask_redraw]=self.param['flight_speed'][0]*cosines
#cp2 = time.time();print(cp2-cp1)
self.y_velocity[mask_redraw]=self.param['flight_speed'][0]*sines
#self.y_velocity[mask_redraw]=self.param['flight_speed'][0]*scipy.sin(directions)
#cp3 = time.time();print(cp3-cp2)
#and get assigned a fnew interval count until they change direction again
self.increments_until_turn[mask_redraw] = scipy.floor(#scipy.random.choice(self.increments_pool,sum(mask_redraw))
scipy.stats.lognorm.rvs(sigma,size=draws,scale=
(300/3.0)/dt*
scipy.exp(mu)))
#cp4 = time.time(); print(cp4-cp3)
#print(cp4-cp)
#Once positions are updated, update the variable that keeps track of distance to origin
self.distance_to_origin = scipy.sqrt(self.x_position**2+self.y_position**2)
def get_par_perp_comps(self,t,wind_field,mask):
x_wind, y_wind = wind_field.value(
t,self.x_position[mask], self.y_position[mask])
wind = scipy.array([x_wind,y_wind])
velocity = scipy.array([
self.x_velocity[mask],self.y_velocity[mask]])
par_vec = scipy.zeros(scipy.shape(velocity))
perp_vec = scipy.zeros(scipy.shape(velocity))
for i in range(scipy.size(velocity,1)):
u,v = velocity[:,i],wind[:,i]
par,perp = par_perp(v,u)
par_vec[:,i],perp_vec[:,i] = par,perp
return par_vec,perp_vec
def update_par_perp_comps(self,t,wind_field,mask_release,mask_startmode,mask_trapped,mask_reset_startmode):
#Check if the wind field has changed since last time-step, if so, re-compute get_par_perp_comps
if wind_field.evolving:
mask_not_stuck = scipy.logical_not(mask_trapped)
self.par_wind[:,mask_not_stuck],self.perp_wind[:,mask_not_stuck] = \
self.get_par_perp_comps(t,wind_field,mask_not_stuck)
#Set the flys who have been released and who are not in start_mode to zero par and zero perp
self.par_wind[:,mask_release&~mask_startmode] = 0.
self.perp_wind[:,mask_release&~mask_startmode] = 0.
#For the flies that just returned to startmode after casting, restore par/perp wind components
self.par_wind[:,mask_reset_startmode],self.perp_wind[:,mask_reset_startmode] = \
self.get_par_perp_comps(t,wind_field,mask_reset_startmode)
#Set the flys who have not been released to zero par and zero perp
# self.par_wind[:,~mask_release] = 0.
# self.perp_wind[:,~mask_release] = 0.
class ReducedSwarmOfFlies(object):
    """
    These flies, designed to interact with the LogisticProbPlume object
    skip the cast and surge process and just head straight to the source
    or miss the plume depending on their draw with the logistic probability function.
    They still have all the capacities for distributed release, and all the
    open space navigation parameters.
    Only designed to work with non-changing, straight wind.
    (has no surging error)
    """
    # Default swarm size; shapes every per-fly array in DefaultParam.
    DefaultSize = 500
    # Default simulation parameters; callers may override any entry via `param`.
    DefaultParam = {
        'dt' : 0.25,
        'initial_heading_dist': scipy.stats.uniform(0,2*scipy.pi), #continuous_distribution object
        'initial_heading' : scipy.radians(scipy.random.uniform(0.0,360.0,(DefaultSize,))),
        'x_start_position' : scipy.zeros((DefaultSize,)),
        'y_start_position' : scipy.zeros((DefaultSize,)),
        'flight_speed' : scipy.full((DefaultSize,), 0.7),
        'release_time' : scipy.full((DefaultSize,), 0.0),
        'release_time_constant': None,
        'wind_slippage' : (0.0,0.0), #(// to fly's path, perp to fly's path)
        'pure_advection': False,
        'airspeed_saturation': False
        }
    # Per-fly behavioral mode codes stored in self.mode.
    Mode_StartMode = 0
    Mode_FlyUpWind = 1
    Mode_CastForOdor = 2
    Mode_Trapped = 3
    def __init__(self,wind_field,traps,param={},start_type='fh'):
        #default start type is fixed heading
        '''Basic parameters'''
        self.param = dict(self.DefaultParam)
        self.param.update(param)
        self.dt = self.param['dt']
        # NOTE(review): 'dt_plot', 't_stop', 'heading_data' (and 'swarm_size'
        # when heading_data is given) are not in DefaultParam, so they must be
        # supplied via `param` or the lookups below raise KeyError -- confirm
        # all callers provide them.
        self.dt_plot=self.param['dt_plot']
        self.t_stop = self.param['t_stop']
        self.x_position = np.copy(self.param['x_start_position'])
        self.y_position = np.copy(self.param['y_start_position'])
        self.distance_to_origin = scipy.zeros((self.size))
        self.num_traps = traps.num_traps
        '''Open space parameters and variables'''
        if(not(self.param['heading_data']==None)):
            ##If heading data field is provided to init, any mu/kappa information will be OVERRIDDEN
            (mean,kappa) = fit_von_mises(self.param['heading_data'])
            self.param['initial_heading_dist'] = scipy.stats.vonmises(loc=mean,kappa=kappa)
            self.param['initial_heading'] = scipy.random.vonmises(mean,kappa,(self.param['swarm_size'],))
        self.check_param()
        self.x_velocity = self.param['flight_speed']*scipy.cos(self.param['initial_heading'])
        self.y_velocity = self.param['flight_speed']*scipy.sin(self.param['initial_heading'])
        self.mode = scipy.full((self.size,), self.Mode_StartMode, dtype=int)
        if self.param['pure_advection']:
            # Pure advection disables wind slippage entirely.
            self.param['wind_slippage'] = (0,0)
        self.parallel_coeff,self.perp_coeff = self.param['wind_slippage']
        self.par_wind,self.perp_wind = self.get_par_perp_comps(0.,wind_field,
            scipy.full((self.size,),True,dtype=bool))
        #^This is the set of 2 x time arrays of the components of each fly's velocity par/perp to wind
        self.start_type = start_type #Either 'fh' (fixed heading) or 'rw' (random walk)
        if start_type=='rw':
            self.rw_dist = scipy.stats.lognorm(0.25,scale=1)
        else:
            self.rw_dist = None
        # NOTE(review): start_type 'cvrw' uses self.increments_until_turn in
        # update_positions(), but it is never initialized here -- confirm
        # 'cvrw' is actually supported for this reduced swarm.
        '''Trapped fly parameters and variables'''
        self.trap_num = scipy.full((self.size,),-1, dtype=int)   # -1 = not trapped
        self.in_trap = scipy.full((self.size,), False, dtype=bool)
        self.x_trap_loc = scipy.zeros((self.size,))
        self.y_trap_loc = scipy.zeros((self.size,))
        self.t_in_trap = scipy.full((self.size,),scipy.inf)      # +inf = never trapped
        self.angle_in_trap = scipy.full(self.size,scipy.inf)
        self.mask_in_odor_band_last_step= np.zeros((self.size),dtype=bool) #variable used to make refractory period for odor detection
    def check_param(self):
        """
        Check parameters - mostly just that shape of ndarrays match
        """
        # NOTE(review): scipy.ndim of a .shape tuple is always 1, so this
        # condition can never fire as written -- the intent was presumably
        # `self.param['initial_heading'].ndim > 1`; confirm.
        if scipy.ndim(self.param['initial_heading'].shape) > 1:
            # NOTE(review): `raise(Type, msg)` is the Python 2 tuple form;
            # under Python 3 it does not raise a ValueError.
            raise(ValueError, 'initial_heading must have ndim=1')
        equal_shape_list = ['x_start_position','y_start_position','flight_speed','release_time']
        for item in equal_shape_list:
            if self.param[item].shape != self.param['initial_heading'].shape:
                print(item)
                print(self.param[item].shape,self.param['initial_heading'].shape)
                raise(ValueError, '{0}.shape must equal initial_heading.shape'.format(item))
    @property
    def size(self):
        # Swarm size is defined by the length of the initial-heading array.
        return self.param['initial_heading'].shape[0]
    def update(self, t, dt, wind_field, odor_field,traps,plumes=None,
            xlim=None,ylim=None,pre_stored=False):
        """
        Update fly swarm one time step.
        """
        # NOTE(review): plumes/xlim/ylim/pre_stored are unused here; they look
        # like holdovers from the full swarm model's interface.
        ''' (1) Categorize flies for update type and report category counts'''
        mask_release = t > self.param['release_time']
        mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
        mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
        mask_trapped = self.mode == self.Mode_Trapped
        last = time.time()
        ''' (2) Get odor and wind info for each fly that is flying '''
        ''' Note, for this version of the swarm model, the odor is really just
        the probability value given by the LogisticProbPlume object.
        (the name is a holdover)'''
        # Flies that are unreleased or trapped keep odor = NaN.
        odor = scipy.full(self.size,scipy.nan)
        mask_odor_relevant = mask_release & (~mask_trapped)
        odor[mask_odor_relevant] = odor_field.value(
            self.x_position[mask_odor_relevant],self.y_position[mask_odor_relevant])
        print('time obtaining odor info: '+str(time.time()-last))
        ''' (b) wind info'''
        x_wind, y_wind = wind_field.value(t,self.x_position, self.y_position)
        x_wind_unit, y_wind_unit = unit_vector(x_wind, y_wind)
        wind_uvecs = {'x': x_wind_unit,'y': y_wind_unit}
        '''(3) Update the fly velocities according to mode and open space behavior paradigm'''
        # Update state for flies in traps
        self.update_for_in_trap(t, traps)
        #----Update the masks----- (trapping above may have changed modes)
        mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
        mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
        mask_trapped = self.mode == self.Mode_Trapped
        if not(self.start_type=='cvrw' or self.start_type=='rw'):
            #The random walk mode excludes odor detection
            masks = {'startmode': mask_startmode, 'flyupwd': mask_flyupwd}
            # Update state for flies detecting odor plumes
            self.update_for_odor_detection(dt, odor,odor_field, wind_uvecs, masks)
            #----Update the masks-----
            mask_release = t > self.param['release_time']
            mask_startmode = mask_release & (self.mode == self.Mode_StartMode)
            mask_flyupwd = mask_release & (self.mode == self.Mode_FlyUpWind)
            mask_trapped = self.mode == self.Mode_Trapped
        # Update position based on mode and current velocities
        ''' (4) Update the positions with the new velocities'''
        ''' (a) Update the positions with general direction excluding par/perp slip component'''
        self.update_positions(mask_release,mask_trapped,mask_startmode,x_wind,y_wind,dt)
        #Michael's idea 2/27/18: apply wind slippage according to c_1*(component
        #parallel to fly's velocity) + c2*(component pe rp to fly's velocity)
        ''' (b) Update the positions with the perp slip component'''
        self.update_par_perp_comps(t,wind_field,mask_release,mask_startmode,
            mask_trapped)
        #par/perp comps for flies not {released and in fly mode} are set to 0.
        c1 = self.parallel_coeff
        c2 = self.perp_coeff
        self.x_position[mask_startmode] += dt*(c1*self.par_wind[0,mask_startmode]+c2*self.perp_wind[0,mask_startmode])
        self.y_position[mask_startmode] += dt*(c1*self.par_wind[1,mask_startmode]+c2*self.perp_wind[1,mask_startmode])
    def update_for_odor_detection(self, dt, odor, odor_field, wind_uvecs, masks):
        #Worth noting that the shape of the vector odor by the time it's passed
        #into this function by the update() function has been masked to exclude
        #pre-release flies and trapped flies.
        """
        Update simulation for odor detection
        * For flies in StartMode, compute their LogisticProbPlume value.
        * Test if they detect odor (Bernoulli draw).
        * If they do detect odor change their mode to FlyUpWind.
        * set x and y velocities to upwind at speed
        """
        x_wind_unit = wind_uvecs['x']
        y_wind_unit = wind_uvecs['y']
        mask_startmode = masks['startmode']
        # Bernoulli draw per fly with success probability = odor value.
        # NOTE(review): odor contains NaN for unreleased/trapped flies (see
        # update()); confirm np.random.binomial tolerates NaN p here.
        mask_gt_upper = np.random.binomial(1,odor,size=np.shape(odor)).astype(bool)
        #we also want to know which flies are in the band but didn't "get their card drawn"
        #(the flies that we want to stop from detecting in the next couple frames to
        # avoid double drawing problems):
        mask_in_odor_band = odor>0.01
        # print('----------'+str(np.sum(mask_in_odor_band))+'----------')
        already_drawn = (mask_in_odor_band & self.mask_in_odor_band_last_step)
        mask_change = mask_gt_upper & (mask_startmode) & (~already_drawn)
        self.mode[mask_change] = self.Mode_FlyUpWind
        # Set x and y velocities for the flies which just changed to FlyUpWind.
        speed = self.param['flight_speed'][mask_change]
        self.x_velocity[mask_change] = -speed*x_wind_unit[mask_change]
        self.y_velocity[mask_change] = -speed*y_wind_unit[mask_change]
        # Remember this step's in-band flies for next step's refractory check.
        self.mask_in_odor_band_last_step = mask_in_odor_band
    def update_for_in_trap(self, t, traps): #******
        """
        Update simulation for flies in traps.
        * If flies are in traps. If so record trap info and time.
        """
        sources = traps.param['source_locations'] #Of format [(0,0),]
        for trap_num, trap_loc in enumerate(sources):
            dist_vals = distance((self.x_position, self.y_position),trap_loc)
            mask_trapped = dist_vals < traps.param['trap_radius']
            self.mode[mask_trapped] = self.Mode_Trapped
            self.trap_num[mask_trapped] = trap_num
            self.x_trap_loc[mask_trapped] = trap_loc[0]
            self.y_trap_loc[mask_trapped] = trap_loc[1]
            # Get time stamp for newly trapped flies (t_in_trap still at its
            # +inf sentinel means the fly was not trapped before).
            mask_newly_trapped = mask_trapped & (self.t_in_trap == scipy.inf)
            self.t_in_trap[mask_newly_trapped] = t
            #Get arrival angle for newly trapped flies
            vfunc = scipy.vectorize(cartesian_to_polar)
            xvels,yvels = self.x_velocity[mask_newly_trapped],self.y_velocity[mask_newly_trapped]
            if scipy.size(xvels)>0:
                _,thetas = vfunc(xvels,yvels)
                # Shift angles into [0, 2*pi).
                thetas = (thetas+scipy.pi)%(2*scipy.pi)
                self.angle_in_trap[mask_newly_trapped] = thetas
            #Stop the flies trapped
            self.x_velocity[mask_trapped] = 0.0
            self.y_velocity[mask_trapped] = 0.0
    def get_time_trapped(self,trap_num=None,straight_shots=False):
        """Return capture times of trapped flies, optionally restricted to one
        trap and/or to flies that never tracked a plume."""
        #adjusted this function to isolate flies that went straight to traps
        mask_trapped = self.mode == self.Mode_Trapped
        if straight_shots:
            mask_trapped = mask_trapped & scipy.logical_not(self.ever_tracked)
        if trap_num is None:
            return self.t_in_trap[mask_trapped]
        else:
            mask_trapped_in_num = mask_trapped & (self.trap_num == trap_num)
            return self.t_in_trap[mask_trapped_in_num]
    def get_angle_trapped(self,trap_num,time_window):
        """Return arrival angles of flies caught in trap ``trap_num``;
        time_window=[] means all times, else a (t0, t1) inclusive window."""
        mask_trapped = self.mode == self.Mode_Trapped
        mask_trapped_in_num = mask_trapped & (self.trap_num == trap_num)
        if not(time_window==[]):
            #This case returns angle trapped of those trapped in a given time window
            time_bool = (self.t_in_trap>=time_window[0]) & (self.t_in_trap<=time_window[1])
            mask_trapped_in_num = mask_trapped_in_num & time_bool
        return self.angle_in_trap[mask_trapped_in_num]
    def get_trap_nums(self):
        """Return sorted list of trap indices that caught at least one fly."""
        mask_trap_num_set = self.trap_num != -1
        trap_num_array = scipy.unique(self.trap_num[mask_trap_num_set])
        trap_num_array.sort()
        return list(trap_num_array)
    def list_all_traps(self):
        """Return an iterable over every trap index, caught or not."""
        return(range(self.num_traps))
    def get_trap_counts(self):
        """Return a length-num_traps array of per-trap catch counts (zeros for
        traps that caught nothing)."""
        mask_trap_num_set = self.trap_num != -1
        (trap_num_array,trap_counts)=scipy.unique(
            self.trap_num[mask_trap_num_set],return_counts = True)
        all_trap_counts = scipy.zeros(self.num_traps)
        all_trap_counts[trap_num_array] = trap_counts
        return all_trap_counts
    def update_positions(self,mask_release,mask_trapped,mask_startmode,x_wind,y_wind,dt):
        """Advance fly positions by one ``dt`` step according to
        ``self.start_type`` ('fh', 'rw' or 'cvrw'); see branch comments.
        Also refreshes ``self.distance_to_origin``."""
        if self.start_type=='fh' or sum(mask_startmode)<1.:
            mask_move = mask_release & (~mask_trapped)
            print('number moving: '+str(scipy.sum(mask_move)))
            if self.param['pure_advection']:
                # Start-mode flies are carried passively by the wind.
                self.x_position[mask_startmode] += dt*x_wind[mask_startmode]
                self.y_position[mask_startmode] += dt*y_wind[mask_startmode]
                self.x_position[mask_move& (~mask_startmode)] += dt*self.x_velocity[mask_move& (~mask_startmode)]
                self.y_position[mask_move& (~mask_startmode)] += dt*self.y_velocity[mask_move& (~mask_startmode)]
            elif self.param['airspeed_saturation']:
                # Signed magnitude of the wind component parallel to each
                # start-mode fly's velocity (positive = tailwind).
                wind_par = self.par_wind[:,mask_move&mask_startmode]
                wind_par_mags = np.sqrt(np.sum(wind_par*wind_par,axis=0))
                wind_par_signs = np.sign(np.sum(wind_par*
                    (np.vstack((self.x_velocity[mask_move&mask_startmode],self.y_velocity[
                        mask_move&mask_startmode])))
                    ,axis=0))
                signed_wind_par_mags = wind_par_mags*wind_par_signs
                flight_speed = self.param['flight_speed'][0]
                # New version: a sigmoidal smoothing of the above
                adjusted_mag = speed_sigmoid_func(signed_wind_par_mags)
                adjusted_mag = adjusted_mag/flight_speed #normalization to unit mag
                # plt.figure()
                # plt.plot(signed_wind_par_mags,adjusted_mag,'o')
                # plt.show()
                # raw_input()
                self.x_position[mask_move&mask_startmode] += dt*adjusted_mag*self.x_velocity[mask_move&mask_startmode]
                self.y_position[mask_move&mask_startmode] += dt*adjusted_mag*self.y_velocity[mask_move&mask_startmode]
                self.x_position[mask_move&(~mask_startmode)] += dt*self.x_velocity[mask_move&(~mask_startmode)]
                self.y_position[mask_move&(~mask_startmode)] += dt*self.y_velocity[mask_move&(~mask_startmode)]
            else:
                self.x_position[mask_move] += dt*self.x_velocity[mask_move]
                self.y_position[mask_move] += dt*self.y_velocity[mask_move]
        elif self.start_type=='rw':
            #The flies who are not in start_mode move the same way
            mask_move = mask_release & (~mask_trapped) & (~mask_startmode)
            self.x_position[mask_move] += dt*self.x_velocity[mask_move]
            self.y_position[mask_move] += dt*self.y_velocity[mask_move]
            '''Option 1: path lengths are chosen from a heavy-tailed distribution'''
            '''For those in startmode, the x step and y step of each fly is chosen from lognormal (heavy-tailed) distribution
            right now the distribution is manually set up so that moving faster than peak velocity (1.8 m/s) happens with close to
            0 probability: the distribution is lognormal with sigma = 0.25 and mu = 0, and then *(1.8/2.0)*timestep'''
            sigma = 0.25
            mu = 0
            scaling_factor = (1.8/2.0)*dt #So that 1.8 m/s is the fastest it ever flies
            draws = sum(mask_startmode)
            self.x_position[mask_startmode] += scaling_factor*scipy.random.choice([1,-1],
                size=draws)*scipy.stats.lognorm.rvs(sigma,size=draws,scale=scipy.exp(mu))
            self.y_position[mask_startmode] += scaling_factor*scipy.random.choice([1,-1],
                size=draws)*scipy.stats.lognorm.rvs(sigma,size=draws,scale=scipy.exp(mu))
        elif self.start_type=='cvrw':
            # NOTE(review): this branch reads self.increments_until_turn,
            # which __init__ never creates -- confirm 'cvrw' is supported.
            start = time.time()
            #All flies update position according to velocity, including start_mode flies
            mask_move = mask_release & (~mask_trapped)
            self.x_position[mask_move] += dt*self.x_velocity[mask_move]
            self.y_position[mask_move] += dt*self.y_velocity[mask_move]
            '''Option 2: Durations of a given direction are chosen from a heavy-tailed distribution
            Draw from same kind of distribution as above, but round up for discrete time steps.
            Distribution is lognormal with sigma = 0.5 and mu = 0, and then *(300/3.0)/timestep,
            which makes the max occuring duration around 300 s= 5 min.'''
            sigma = 0.5
            mu = 0.
            #Every startmode fly has one time step less left in current direction
            self.increments_until_turn[mask_startmode&mask_move] -=1
            #print(self.increments_until_turn[mask_startmode&mask_move])
            #Flies whose time is up get assigned a new direction --> x and y velocity
            mask_redraw = mask_move&mask_startmode & (self.increments_until_turn == 0)
            cp = time.time()
            draws = sum(mask_redraw)
            if draws>0:
                #directions = scipy.random.choice(self.uniform_directions_pool,draws)
                sines, cosines = scipy.zeros(draws),scipy.zeros(draws)
                # NOTE(review): xrange is Python 2 only.
                for x in xrange(draws):
                    direction = scipy.stats.uniform.rvs(0.0,2*scipy.pi)
                    sines[x],cosines[x] = scipy.sin(direction),scipy.cos(direction)
                #cp1 = time.time();print('cp1 :'+str(cp1-cp))
                #self.x_velocity[mask_redraw]=self.param['flight_speed'][0]*scipy.cos(directions)
                self.x_velocity[mask_redraw]=self.param['flight_speed'][0]*cosines
                #cp2 = time.time();print(cp2-cp1)
                self.y_velocity[mask_redraw]=self.param['flight_speed'][0]*sines
                #self.y_velocity[mask_redraw]=self.param['flight_speed'][0]*scipy.sin(directions)
                #cp3 = time.time();print(cp3-cp2)
                #and get assigned a fnew interval count until they change direction again
                self.increments_until_turn[mask_redraw] = scipy.floor(#scipy.random.choice(self.increments_pool,sum(mask_redraw))
                    scipy.stats.lognorm.rvs(sigma,size=draws,scale=
                        (300/3.0)/dt*
                        scipy.exp(mu)))
                #cp4 = time.time(); print(cp4-cp3)
                #print(cp4-cp)
        #Once positions are updated, update the variable that keeps track of distance to origin
        self.distance_to_origin = scipy.sqrt(self.x_position**2+self.y_position**2)
    def get_par_perp_comps(self,t,wind_field,mask):
        """Split the wind at each masked fly into components parallel and
        perpendicular to the fly's velocity; returns two (2, n) arrays."""
        x_wind, y_wind = wind_field.value(
            t,self.x_position[mask], self.y_position[mask])
        wind = scipy.array([x_wind,y_wind])
        velocity = scipy.array([
            self.x_velocity[mask],self.y_velocity[mask]])
        par_vec = scipy.zeros(scipy.shape(velocity))
        perp_vec = scipy.zeros(scipy.shape(velocity))
        # par_perp operates on single 2-vectors, so loop over columns.
        for i in range(scipy.size(velocity,1)):
            u,v = velocity[:,i],wind[:,i]
            par,perp = par_perp(v,u)
            par_vec[:,i],perp_vec[:,i] = par,perp
        return par_vec,perp_vec
    def update_par_perp_comps(self,t,wind_field,mask_release,mask_startmode,mask_trapped):
        """Refresh cached par/perp wind components: recompute for untrapped
        flies if the wind evolves, then zero slippage for released flies that
        left start mode."""
        #Check if the wind field has changed since last time-step, if so, re-compute get_par_perp_comps
        if wind_field.evolving:
            mask_not_stuck = scipy.logical_not(mask_trapped)
            self.par_wind[:,mask_not_stuck],self.perp_wind[:,mask_not_stuck] = \
                self.get_par_perp_comps(t,wind_field,mask_not_stuck)
        #Set the flies who have been released and who are not in start_mode to zero par and zero perp
        self.par_wind[:,mask_release&~mask_startmode] = 0.
        self.perp_wind[:,mask_release&~mask_startmode] = 0.
        # Unreleased flies deliberately keep their initial components
        # (zeroing below was disabled on purpose):
        #Set the flies who have not been released to zero par and zero perp
        # self.par_wind[:,~mask_release] = 0.
        # self.perp_wind[:,~mask_release] = 0.
| 50.837951
| 198
| 0.645141
| 8,258
| 60,548
| 4.495398
| 0.075684
| 0.024998
| 0.016162
| 0.021119
| 0.816583
| 0.789645
| 0.778897
| 0.761334
| 0.747057
| 0.729655
| 0
| 0.009465
| 0.253155
| 60,548
| 1,190
| 199
| 50.880672
| 0.811477
| 0.202401
| 0
| 0.755495
| 0
| 0
| 0.052426
| 0.002871
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042582
| false
| 0.002747
| 0.021978
| 0.005495
| 0.104396
| 0.020604
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00c69056d5cfcecee58289a40a3996541beb3c08
| 8,522
|
py
|
Python
|
third_party_package/RDKit_2015_03_1/rdkit/Chem/FeatMaps/UnitTestFeatMapUtils.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | 9
|
2019-04-23T01:46:12.000Z
|
2021-08-16T07:07:12.000Z
|
third_party_package/RDKit_2015_03_1/rdkit/Chem/FeatMaps/UnitTestFeatMapUtils.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | null | null | null |
third_party_package/RDKit_2015_03_1/rdkit/Chem/FeatMaps/UnitTestFeatMapUtils.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | 5
|
2016-09-21T03:47:48.000Z
|
2019-07-30T22:17:35.000Z
|
# $Id$
#
# Copyright (C) 2006 greg Landrum
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from rdkit import RDConfig
import unittest,sys,os,math
from rdkit import Chem
from rdkit.Chem.FeatMaps import FeatMaps,FeatMapParser,FeatMapUtils
from rdkit.Chem.FeatMaps.FeatMapPoint import FeatMapPoint
from rdkit.Geometry import Point3D
def feq(n1, n2, tol=1e-4):
    """Return True when n1 and n2 differ by no more than tol."""
    difference = n1 - n2
    return -tol <= difference <= tol
def pteq(p1, p2, tol=1e-4):
    """Return True when points p1 and p2 coincide, i.e. their squared
    separation is within tol of zero."""
    sq_dist = (p1 - p2).LengthSq()
    return feq(sq_dist, 0.0, tol)
class TestCase(unittest.TestCase):
    """Exercises FeatMapUtils.MergeFeatPoints with the Distance and Overlap
    merge metrics combined with the Average, WeightedAverage, and UseLarger
    merge methods.

    Idiom fix: ``assertTrue(a == b)`` replaced by ``assertEqual(a, b)`` so
    failures report the actual values instead of just ``False is not true``.
    Point comparisons keep ``assertTrue(pteq(...))`` since they need the
    tolerance-aware helper.
    """

    def setUp(self):
        # Parameter header prepended to every feature-map text block below.
        self.paramTxt = """
BeginParams
family=Acceptor radius=0.5 profile=Box
EndParams
"""
        self.p = FeatMapParser.FeatMapParser()

    def _parseMap(self, txt):
        """Parse *txt* into a fresh FeatMap.

        Merging mutates the map in place, so tests re-parse before each
        merge-method variant.
        """
        self.p.SetData(txt)
        return self.p.Parse()

    def test1Basics(self):
        """Distance-metric merging, default and explicit merge methods."""
        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.1, 0.0, 0.0) weight=1.0
family=Acceptor pos=(3.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # Without a merge metric nothing can be merged.
        self.assertFalse(FeatMapUtils.MergeFeatPoints(fm1))
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 2)
        # The two close points (1.0 and 1.1) merge to their midpoint.
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.05, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(3.0, 0, 0)))

        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.1, 0.0, 0.0) weight=1.0
family=Acceptor pos=(3.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 2)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.05, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(3.5, 0, 0)))

        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.2, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.3, 0.0, 0.0) weight=1.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # Only the closest pair (1.2, 1.3) merges in a single pass.
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.00, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.25, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))

        # Same layout, but the 1.2 point now carries weight 3.0 so the three
        # merge methods produce distinguishable results.
        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.2, 0.0, 0.0) weight=3.0
family=Acceptor pos=(1.3, 0.0, 0.0) weight=1.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance,
                                                     mergeMethod=FeatMapUtils.MergeMethod.Average))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # Unweighted average ignores the weight: midpoint of 1.2 and 1.3.
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.00, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.25, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))

        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance,
                                                     mergeMethod=FeatMapUtils.MergeMethod.WeightedAverage))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # Weighted average: (1.2*3 + 1.3*1) / 4 = 1.225.
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.00, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.225, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))

        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance,
                                                     mergeMethod=FeatMapUtils.MergeMethod.UseLarger))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # UseLarger keeps the heavier point's position (1.2, weight 3.0).
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.00, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.2, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))

    def _test1BasicsRepeated(self):
        """Disabled (leading underscore): repeated Distance merges collapse a
        chain of points one pair per pass."""
        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(0.7, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.2, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.3, 0.0, 0.0) weight=1.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 5)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(0.7, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.0, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(1.25, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(3).GetPos(), Point3D(4.0, 0, 0)))
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(0.7, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.125, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Distance))
        self.assertEqual(fm1.GetNumFeatures(), 2)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(0.9125, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(4.0, 0, 0)))

    def test2ScoreBasics(self):
        """Overlap-metric merging with the Average merge method."""
        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.2, 0.0, 0.0) weight=3.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 3)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Overlap,
                                                     mergeMethod=FeatMapUtils.MergeMethod.Average))
        self.assertEqual(fm1.GetNumFeatures(), 2)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.1, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(4.0, 0, 0)))

        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.1, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.3, 0.0, 0.0) weight=3.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Overlap,
                                                     mergeMethod=FeatMapUtils.MergeMethod.Average))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        # Overlap pairing differs from Distance: 1.0/1.3 merge, 1.1 survives.
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.15, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.1, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))

        txt = self.paramTxt + """
BeginPoints
family=Acceptor pos=(1.0, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.2, 0.0, 0.0) weight=1.0
family=Acceptor pos=(1.6, 0.0, 0.0) weight=3.0
family=Acceptor pos=(4.0, 0.0, 0.0) weight=1.0
EndPoints
"""
        fm1 = self._parseMap(txt)
        self.assertEqual(fm1.GetNumFeatures(), 4)
        self.assertTrue(FeatMapUtils.MergeFeatPoints(fm1, FeatMapUtils.MergeMetric.Overlap,
                                                     mergeMethod=FeatMapUtils.MergeMethod.Average))
        self.assertEqual(fm1.GetNumFeatures(), 3)
        self.assertTrue(pteq(fm1.GetFeature(0).GetPos(), Point3D(1.0, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(1).GetPos(), Point3D(1.4, 0, 0)))
        self.assertTrue(pteq(fm1.GetFeature(2).GetPos(), Point3D(4.0, 0, 0)))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 39.271889
| 103
| 0.675194
| 1,263
| 8,522
| 4.548694
| 0.088678
| 0.054656
| 0.048042
| 0.03342
| 0.868581
| 0.868581
| 0.868581
| 0.867363
| 0.859182
| 0.859008
| 0
| 0.081122
| 0.150904
| 8,522
| 216
| 104
| 39.453704
| 0.712825
| 0.028984
| 0
| 0.761111
| 0
| 0.172222
| 0.218686
| 0
| 0
| 0
| 0
| 0
| 0.377778
| 1
| 0.033333
| false
| 0
| 0.033333
| 0.011111
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00cc59ae50140ffa9b8b10b06ef85404de3803ae
| 116,999
|
py
|
Python
|
Octavo/Assembler/archive/array_scalar.py
|
laforest/Octavo
|
e3041ad98c58eeff1f59e65b01da1eb458a0d591
|
[
"BSD-2-Clause"
] | 63
|
2015-03-16T08:37:44.000Z
|
2021-11-15T00:35:14.000Z
|
Octavo/Assembler/archive/array_scalar.py
|
laforest/Octavo
|
e3041ad98c58eeff1f59e65b01da1eb458a0d591
|
[
"BSD-2-Clause"
] | 55
|
2015-01-11T02:05:57.000Z
|
2019-03-20T00:12:32.000Z
|
Octavo/Assembler/archive/array_scalar.py
|
laforest/Octavo
|
e3041ad98c58eeff1f59e65b01da1eb458a0d591
|
[
"BSD-2-Clause"
] | 13
|
2016-05-13T10:23:45.000Z
|
2021-11-15T00:35:13.000Z
|
#! /usr/bin/python
import empty
from opcodes import *
from memory_map import mem_map
from branching_flags import *
# Benchmark identifiers; assembled memory images are written under these names.
bench_dir = "Array_Scalar"
bench_file = "array_scalar"
bench_name = bench_dir + "/" + bench_file
SIMD_bench_name = bench_dir + "/" + "SIMD_" + bench_file
# Get empty instances with default parameters
# NOTE(review): this rebinding shadows the `empty` module imported above; from
# here on, `empty` is the container returned by assemble_all(), indexed by
# memory name ("PC", "A", "B", "I") in the assemble_* functions below.
empty = empty.assemble_all()
def assemble_PC():
    """Return the program-counter memory image for this benchmark.

    No PC entries are needed here; only the output file name is set.
    """
    pc_mem = empty["PC"]
    pc_mem.file_name = bench_name
    return pc_mem
def assemble_A():
    """Build the A-memory image: output port, constants, and the named
    branch-table placeholder slots that the I-memory code fills in later."""
    a_mem = empty["A"]
    a_mem.file_name = bench_name
    a_mem.P("A_IO", mem_map["A"]["IO_base"])
    a_mem.A(0)
    a_mem.L(0)
    a_mem.L(1), a_mem.N("one")
    a_mem.L(-1), a_mem.N("minus_one")
    # Reserve one zeroed entry per branch-table label.
    for jump_label in ("jmp0", "jmp0a", "jmp1", "jmp2", "jmp3"):
        a_mem.L(0), a_mem.N(jump_label)
    return a_mem
def assemble_B():
    """Build the B-memory image: loop bookkeeping, a 10-element array
    (values 1..9 then a -1 end sentinel), and the programmed-offset
    placeholder for the loop pointer."""
    b_mem = empty["B"]
    b_mem.file_name = bench_name
    b_mem.P("B_IO", mem_map["B"]["IO_base"])
    b_mem.P("loop_pointer", mem_map["B"]["PO_INC_base"],
            write_addr=mem_map["H"]["PO_INC_base"])
    b_mem.A(0)
    b_mem.L(0)
    b_mem.L(10), b_mem.N("loop_count_init")
    b_mem.L(0), b_mem.N("loop_count")
    b_mem.L(0), b_mem.N("temp")
    b_mem.L(0), b_mem.N("array")
    # Array body: 1 through 9, terminated by the -1 sentinel.
    for element in range(1, 10):
        b_mem.L(element)
    b_mem.L(-1)
    # Placeholder for the programmed offset.
    b_mem.L(0), b_mem.N("loop_pointer_init")
    return b_mem
def assemble_I(PC, A, B):
I = empty["I"]
I.file_name = bench_name
# How would MIPS do it? Ideal case: no load or branch delay slots, full result forwarding
#
# init: ADD loop_count, loop_count_init, 0
# outer: ADD loop_pointer, loop_pointer_init, 0
# inner: LW temp, loop_pointer
# BLTZ break, temp
# ADD temp, temp, 1
# SW temp, loop_pointer
# ADD loop_pointer, loop_pointer, 1
# JMP inner
# break: SUB loop_count, loop_count, 1
# BGTZ outer, loop_count
# ADD loop_pointer, loop_pointer_init, 0
# output: LW temp, loop_pointer
# BLTZ init, temp
# SW temp, output_port
# ADD loop_pointer, loop_pointer, 1
# JMP output
# Thread 0 has implicit first NOP from pipeline, so starts at 1
# All threads start at 1, to avoid triggering branching unit at 0.
I.A(1)
# Instructions to fill branch table
branch_base_addr = mem_map["BO"]["Origin"]
branch_depth = mem_map["BO"]["Depth"]
I.I(ADD, branch_base_addr, "jmp0", 0)
I.I(ADD, branch_base_addr + branch_depth, "jmp1", 0)
I.I(ADD, branch_base_addr + (branch_depth * 2), "jmp2", 0)
I.I(ADD, branch_base_addr + (branch_depth * 3), "jmp3", 0)
#################################################################################################################################
# Overhead version
# PO_base_addr = mem_map["BPO"]["Origin"]
# I.I(ADD, branch_base_addr, "jmp0", 0), I.N("init") # init: ADD loop_count, loop_count_init, 0
# I.I(ADD, "loop_count", 0, "loop_count_init") # !!! ^^^
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.N("outer") # outer: ADD loop_pointer, loop_pointer_init, 0
# I.NOP(), I.N("inner1") # !!!
# I.I(ADD, "temp", 0, "loop_pointer"), I.N("inner2") # inner: LW temp, loop_pointer
# I.NOP(), I.JNE("break", None, "jmp0") # BLTZ break, temp
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.NOP(), I.JMP("inner2", "jmp1") # JMP inner
# I.I(ADD, "loop_count", "minus_one", "loop_count"), I.N("break") # break: SUB loop_count, loop_count, 1
# I.NOP(), I.JNZ("outer", None, "jmp2") # BGTZ outer, loop_count
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, branch_base_addr, "jmp0a", 0) # !!!
# I.I(ADD, "temp", 0, "loop_pointer"), I.N("output") # output: LW temp, loop_pointer
# I.NOP(), I.JNE("init", None, "jmp3") # BLTZ init, temp
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.NOP(), I.JMP("output", "jmp0a") # JMP output
# Experiment:
# Code size: 19 instructions
# 34 passes over array of 10 elements, 10 times, over 200,000 simulation cycles
# Cycles: 194792 - 40 = 194752
# Useful cycles: 194752 / 8 = 24344
# Cycles per pass: 24344 / 34 = 716
# PC Tally (Revised)
#
# 1 1 # setup
# 1 2 # setup
# 1 3 # setup
# 1 4 # setup
# 35 5 # N !!!
# 35 6 # N
# 350 7 # N U
# 350 8 # N !!!
# 3848 9 # U
# 3848 10 # N
# 3499 11 # U
# 3499 12 # U
# 3499 13 # N U
# 3498 14 # N
# 349 15 # N
# 349 16 # N
# 34 17 # N U
# 34 18 # N !!!
# 374 19 # U
# 374 20 # N
# 340 21 # U
# 340 22 # N U
# 340 23 # N
#
# Useful: 340 + 34 + 3499 + 350 + 3848 + 3499 + 3499 + 374 + 340 = 15783
# Not Useful: 35 + 35 + 350 + 3848 + 3498 + 349 + 349 + 34 + 374 + 340 = 9212
# Total: 24995
# ALU efficiency: 15783 / 24995 = 0.63145
#################################################################################################################################
# Efficient version
# PO_base_addr = mem_map["BPO"]["Origin"]
# I.I(ADD, branch_base_addr, "jmp0", 0), I.N("init")
# I.I(ADD, "loop_count", 0, "loop_count_init")
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.N("outer") # un-branched-to
# I.NOP(), I.N("inner1")
# I.I(ADD, "temp", 0, "loop_pointer"), I.N("inner2")
# I.I(ADD, "temp", "one", "temp"), I.JNE("break", False, "jmp0")
# I.I(ADD, "loop_pointer", 0, "temp"), I.JMP("inner2", "jmp1")
# I.I(ADD, "loop_count", "minus_one", "loop_count"), I.N("break")
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.JNZ("inner1", None, "jmp2")
# I.I(ADD, branch_base_addr, "jmp0a", 0)
# I.I(ADD, "temp", 0, "loop_pointer"), I.N("output")
# I.I(ADD, "A_IO", 0, "temp"), I.N("output2"), I.JNE("init", False, "jmp3"), I.JPO("output", None, "jmp0a")
# #I.I(ADD, "loop_pointer", 0, "temp"), I.N("output2"), I.JNE("init", False, "jmp3")
# #I.I(ADD, "temp", 0, "loop_pointer"), I.JMP("output2", "jmp0a")
# Experiment:
# Code size: 12 instructions
# 66 passes over array of 10 elements, 10 times, over 200,000 simulation cycles
# Cycles: 198568 - 40 = 198528
# Useful cycles: 198528 / 8 = 24816
# Cycles per pass: 24816 / 66 = 376
# Speedup relative to MIPS equivalent: 716 / 376 = 1.904x (or +47%)
# PC Tally (Revised)
#
# 1 1 # setup
# 1 2 # setup
# 1 3 # setup
# 1 4 # setup
# 67 5 # N
# 67 6 # N
# 67 7 # N U
# 666 8 # N
# 7315 9 # U
# 731.5 10a # U N (10% cancelled at end of loop: 7315 * 0.1 = 731.5)
# 6583.5 10b # U U (90% branch not taken: 7315 * 0.9 = 6583.5)
# 6650 11 # U
# 665 12 # N
# 665 13 # N U
# 66 14 # N
# 726 15 # U
# 72.6 16a # U N (10% cancelled at end of loop: 726 * 0.1 = 72.6)
# 653.4 16b # U U (90% branch not taken: 726 * 0.9 = 653.4)
#
# Useful Total: 67 + 7315 + 6583.5 + 6650 + 665 + 726 + 653.4 = 22659.9
# Not USeful Total: 67 + 67 + 666 + 731.5 + 665 + 66 + 72.6 = 2335.1
# Total: 24995 (includes runt pass at end)
# ALU efficiency: 22659.9 / 24995 = 0.90658
#################################################################################################################################
# Efficient Unrolled
PO_base_addr = mem_map["BPO"]["Origin"]
I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.N("init") # outer: ADD loop_pointer, loop_pointer_init, 0
I.NOP(), # !!!
# 01 ----------------------------------------------------------------------------------------------------------------------------
#I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.N("outer") # outer: ADD loop_pointer, loop_pointer_init, 0
#I.NOP(), # !!!
I.I(ADD, "temp", 0, "loop_pointer"), I.N("outer") # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 02 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 03 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 04 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 05 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 06 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 07 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 08 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 09 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# 10 ----------------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# OUTPUT ---------------------------------------------------------------------------------------------------------------------
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
I.I(ADD, "A_IO", 0, "temp"), I.JMP("outer", "jmp0") # SW temp, output_port
# Experiment:
# Code size: 331 instructions
# 75 passes over array of 10 elements, 10 times, over 200,000 simulation cycles
# Cycles: 198656 - 56 = 198600
# Useful cycles: 198600 / 8 = 24825
# Cycles per pass: 24825 / 75 = 331
# PC Tally (0 and 1023 are not counted)
# 1-6: 1
# 7-174: 76 (times 168 = 12768)
# 175-337: 75 (times 163 = 12225)
# Total: 24993 all
# ALL instructions useful (first 2 are insignificant error), since JMP folded
# ALU efficiency: 1.00
#################################################################################################################################
## Overhead Unrolled
# PO_base_addr = mem_map["BPO"]["Origin"]
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init"), I.N("init") # outer: ADD loop_pointer, loop_pointer_init, 0
# I.NOP(), # !!!
# # 01 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), I.N("outer") # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 02 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 03 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 04 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 05 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 06 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 07 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 08 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 09 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # 10 ----------------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # inner: LW temp, loop_pointer
# I.I(ADD, "temp", "one", "temp") # ADD temp, temp, 1
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "loop_pointer", 0, "temp") # SW temp, loop_pointer
# # OUTPUT ---------------------------------------------------------------------------------------------------------------------
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, "A_IO", 0, "temp") # SW temp, output_port
# I.NOP() # ADD loop_pointer, loop_pointer, 1
# I.I(ADD, "temp", 0, "loop_pointer"), # output: LW temp, loop_pointer
# I.I(ADD, PO_base_addr, 0, "loop_pointer_init") # ADD loop_pointer, loop_pointer_init, 0
# I.I(ADD, "A_IO", 0, "temp"), # SW temp, output_port
# I.NOP(), I.JMP("outer", "jmp0") # JMP output
# Experiment
# 57 passes over array of 10 elements, 10 times, over 200,000 simulation cycles
# Cycles: 196592 - 56 = 196536
# Useful Cycles: 196536 / 8 = 24567
# Cycles per pass: 24567 / 57 = 431
# Speedup over Efficient Unrolled: 331 / 431 = 0.768
# PC Tally:
# Over 196536, each instruction runs 57 times
# All useful except:
# final JMP: 57 cycles
# Useful: 196536 - 57 = 196479
# Ratio: 196479 / 196536 = 0.99971
# Resolve jumps
I.resolve_forward_jumps()
# Set programmed offsets
read_PO = (mem_map["B"]["Depth"] - mem_map["B"]["PO_INC_base"] + B.R("array")) & 0x3FF
write_PO = (mem_map["H"]["Origin"] + mem_map["H"]["Depth"] - mem_map["H"]["PO_INC_base"] + B.W("array")) & 0xFFF
PO = (1 << 34) | (1 << 32) | (write_PO << 20) | read_PO
B.A(B.R("loop_pointer_init"))
B.L(PO)
# Since the next indirect memory address is one further down
#read_PO -= 1
#write_PO -= 1
#PO = (1 << 34) | (1 << 32) | (write_PO << 20) | read_PO
#B.A(B.R("output_pointer_init"))
#B.L(PO)
return I
# Leave these all zero for now: only zero-based thread will do something, all
# others will hang at 0 due to empty branch tables.
def assemble_XDO():
    """Return the (empty) A/B/D data-output memories, tagged with the benchmark name.

    Only the zero-based thread does real work in this benchmark, so these
    images stay all-zero; we just stamp the output file name on each.
    """
    memories = [empty[key] for key in ("ADO", "BDO", "DDO")]
    for memory in memories:
        memory.file_name = bench_name
    return tuple(memories)
def assemble_XPO():
    """Return the (empty) A/B/D programmed-offset memories, tagged with the benchmark name.

    These images stay all-zero for this benchmark; only the file name is set
    so they dump alongside the other memories.
    """
    memories = [empty[key] for key in ("APO", "BPO", "DPO")]
    for memory in memories:
        memory.file_name = bench_name
    return tuple(memories)
def assemble_XIN():
    """Return the (empty) A/B/D increment memories, tagged with the benchmark name.

    All-zero images; only the output file name is assigned before dumping.
    """
    memories = [empty[key] for key in ("AIN", "BIN", "DIN")]
    for memory in memories:
        memory.file_name = bench_name
    return tuple(memories)
def assemble_branches():
    """Return the (empty) branch-table memories, tagged with the benchmark name.

    Left all-zero on purpose: only the zero-based thread runs, and every
    other thread hangs at PC 0 because its branch tables are empty.
    """
    memories = [empty[key] for key in ("BO", "BD", "BC", "BP", "BPE")]
    for memory in memories:
        memory.file_name = bench_name
    return tuple(memories)
def assemble_all():
    """Assemble every memory image for the benchmark.

    Returns a dict mapping each memory's short name (e.g. "PC", "A", "BDO")
    to its assembled image, ready to be dumped by dump_all().
    """
    PC = assemble_PC()
    A = assemble_A()
    B = assemble_B()
    # Instruction memory needs the other memories to resolve addresses.
    I = assemble_I(PC, A, B)
    hailstone = {"PC": PC, "A": A, "B": B, "I": I}
    # The remaining images are empty placeholders keyed by name.
    for names, maker in ((("ADO", "BDO", "DDO"), assemble_XDO),
                         (("APO", "BPO", "DPO"), assemble_XPO),
                         (("AIN", "BIN", "DIN"), assemble_XIN),
                         (("BO", "BD", "BC", "BP", "BPE"), assemble_branches)):
        hailstone.update(zip(names, maker()))
    return hailstone
def dump_all(hailstone):
    """Write each assembled memory image in *hailstone* out to its file."""
    for image in hailstone.values():
        image.file_dump()
# Script entry point: assemble every memory image and dump each to its file.
if __name__ == "__main__":
    hailstone = assemble_all()
    dump_all(hailstone)
| 103.999111
| 147
| 0.301438
| 10,051
| 116,999
| 3.360561
| 0.031937
| 0.374515
| 0.102585
| 0.113776
| 0.894455
| 0.884448
| 0.881103
| 0.875892
| 0.868165
| 0.86473
| 0
| 0.032899
| 0.569518
| 116,999
| 1,124
| 148
| 104.091637
| 0.637732
| 0.691356
| 0
| 0.734375
| 0
| 0
| 0.150652
| 0
| 0
| 0
| 0.000293
| 0
| 0
| 1
| 0.022321
| false
| 0
| 0.008929
| 0
| 0.051339
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
97063b24e4b142eaba46da5abd9fa79fe1cefa7a
| 5,588
|
py
|
Python
|
Latest/venv/Lib/site-packages/pyface/tasks/tests/test_task_window.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | 1
|
2022-01-09T20:04:31.000Z
|
2022-01-09T20:04:31.000Z
|
Latest/venv/Lib/site-packages/pyface/tasks/tests/test_task_window.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | 1
|
2022-02-15T12:01:57.000Z
|
2022-03-24T19:48:47.000Z
|
Latest/venv/Lib/site-packages/pyface/tasks/tests/test_task_window.py
|
adamcvj/SatelliteTracker
|
49a8f26804422fdad6f330a5548e9f283d84a55d
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from traits.testing.unittest_tools import UnittestTools
from pyface.tasks.api import Task
from ..task_window import TaskWindow
def _task_window_with_named_tasks(*names, **kwargs):
    """Build a TaskWindow containing one Task per given name.

    If the keyword ``first_active`` is truthy, the first task becomes the
    window's active task; every other keyword is passed straight through
    to the TaskWindow constructor.
    """
    task_list = [Task(name=task_name) for task_name in names]
    if kwargs.pop('first_active', False):
        kwargs['active_task'] = task_list[0]
    return TaskWindow(tasks=task_list, **kwargs)
class TestTaskWindow(unittest.TestCase, UnittestTools):
    """Tests for TaskWindow title behaviour.

    The window title mirrors the active task's name unless an explicit
    title has been set; an explicit title wins until it is reset to ''.
    """

    def test_title_default(self):
        task_window = TaskWindow()
        # default is empty
        self.assertEqual(task_window.title, '')

    def test_title_no_active_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', 'Test Task 2')
        # should be empty
        self.assertEqual(task_window.title, '')

    def test_title_activate_task(self):
        task_window = _task_window_with_named_tasks('Test Task')
        task = task_window.tasks[0]
        # activate task
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.active_task = task
        self.assertEqual(task_window.title, 'Test Task')

    def test_title_change_active_task_name(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', first_active=True)
        task_1 = task_window.tasks[0]
        # change task name
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_1.name = 'Changed Name'
        self.assertEqual(task_window.title, 'Changed Name')

    def test_title_change_active_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task 1', 'Test Task 2', first_active=True)
        task = task_window.tasks[1]
        # change active task
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.active_task = task
        self.assertEqual(task_window.title, 'Test Task 2')

    def test_title_change_deactivate_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task 1', first_active=True)
        # change active task
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.active_task = None
        self.assertEqual(task_window.title, '')

    def test_set_title_no_tasks(self):
        task_window = _task_window_with_named_tasks()
        # set window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = "Window title"
        self.assertEqual(task_window.title, "Window title")

    def test_set_title_change_title(self):
        task_window = _task_window_with_named_tasks(title="Window Title")
        # set window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = "New Window title"
        self.assertEqual(task_window.title, "New Window title")

    def test_set_title_no_active_task(self):
        task_window = _task_window_with_named_tasks('Test Task')
        # set window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = "Window title"
        self.assertEqual(task_window.title, "Window title")

    def test_set_title_active_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', first_active=True)
        # set window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = "Window title"
        self.assertEqual(task_window.title, "Window title")

    def test_set_title_activate_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', title="Window title")
        task = task_window.tasks[0]
        # change activate task (trait fires, no window title change)
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.active_task = task
        self.assertEqual(task_window.title, "Window title")

    def test_set_title_change_active_task_name(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', title="Window title", first_active=True)
        task = task_window.tasks[0]
        # change task name (trait fires, no window title change)
        with self.assertTraitChanges(task_window, 'title', count=1):
            task.name = 'Changed Name'
        self.assertEqual(task_window.title, "Window title")

    def test_set_title_change_active_task(self):
        # BUG FIX: was `active_first=True`, a typo for `first_active=True`.
        # The misspelled kwarg was not popped by the helper and leaked into
        # the TaskWindow constructor, so no task was ever active and the
        # test did not exercise "change active task with an explicit title".
        task_window = _task_window_with_named_tasks(
            'Test Task', 'Test Task 2', title="Window title",
            first_active=True)
        task = task_window.tasks[1]
        # change active task (trait fires, no window title change)
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.active_task = task
        self.assertEqual(task_window.title, "Window title")

    def test_reset_title_active_task(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', title="Window title", first_active=True)
        # reset window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = ""
        self.assertEqual(task_window.title, "Test Task")

    def test_reset_title(self):
        task_window = _task_window_with_named_tasks(
            'Test Task', title="Window title")
        # set window title
        with self.assertTraitChanges(task_window, 'title', count=1):
            task_window.title = ""
        self.assertEqual(task_window.title, "")
| 36.285714
| 73
| 0.669291
| 700
| 5,588
| 5.035714
| 0.08
| 0.215603
| 0.144681
| 0.080851
| 0.85078
| 0.838298
| 0.827234
| 0.76539
| 0.737589
| 0.679433
| 0
| 0.006576
| 0.23801
| 5,588
| 153
| 74
| 36.522876
| 0.821278
| 0.066929
| 0
| 0.54
| 0
| 0
| 0.098115
| 0
| 0
| 0
| 0
| 0
| 0.28
| 1
| 0.16
| false
| 0
| 0.04
| 0
| 0.22
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
973d559b8def30498f71c725d22c934cbe79ce9a
| 41
|
py
|
Python
|
tests/t62.py
|
jplevyak/pyc
|
9f4bc49be78ba29427841460945ce63826fcd857
|
[
"BSD-3-Clause"
] | 3
|
2019-08-21T22:01:35.000Z
|
2021-07-25T00:21:28.000Z
|
tests/t62.py
|
jplevyak/pyc
|
9f4bc49be78ba29427841460945ce63826fcd857
|
[
"BSD-3-Clause"
] | null | null | null |
tests/t62.py
|
jplevyak/pyc
|
9f4bc49be78ba29427841460945ce63826fcd857
|
[
"BSD-3-Clause"
] | null | null | null |
# Exercise list literals and slicing. Originally written with Python 2
# print statements; parenthesizing the single argument keeps the output
# identical under Python 2 while also making the script valid Python 3.
a = [1, 2, 3, 4]
print(a)
# Slice [2:4] takes the third and fourth elements.
b = a[2:4]
print(b)
| 8.2
| 13
| 0.512195
| 13
| 41
| 1.615385
| 0.538462
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 0.243902
| 41
| 4
| 14
| 10.25
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.